001    // Generated by the protocol buffer compiler.  DO NOT EDIT!
002    // source: fsimage.proto
003    
004    package org.apache.hadoop.hdfs.server.namenode;
005    
006    public final class FsImageProto {
007      private FsImageProto() {}
008      public static void registerAllExtensions(
009          com.google.protobuf.ExtensionRegistry registry) {
010      }
  /**
   * Read-only accessor interface for {@code hadoop.hdfs.fsimage.FileSummary}
   * messages; implemented by both the immutable message and its Builder.
   */
  public interface FileSummaryOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required uint32 ondiskVersion = 1;
    /**
     * <code>required uint32 ondiskVersion = 1;</code>
     *
     * <pre>
     * The version of the above EBNF grammars.
     * </pre>
     */
    boolean hasOndiskVersion();
    /**
     * <code>required uint32 ondiskVersion = 1;</code>
     *
     * <pre>
     * The version of the above EBNF grammars.
     * </pre>
     */
    int getOndiskVersion();

    // required uint32 layoutVersion = 2;
    /**
     * <code>required uint32 layoutVersion = 2;</code>
     *
     * <pre>
     * layoutVersion describes which features are available in the
     * FSImage.
     * </pre>
     */
    boolean hasLayoutVersion();
    /**
     * <code>required uint32 layoutVersion = 2;</code>
     *
     * <pre>
     * layoutVersion describes which features are available in the
     * FSImage.
     * </pre>
     */
    int getLayoutVersion();

    // optional string codec = 3;
    /**
     * <code>optional string codec = 3;</code>
     */
    boolean hasCodec();
    /**
     * <code>optional string codec = 3;</code>
     */
    java.lang.String getCodec();
    /**
     * <code>optional string codec = 3;</code>
     */
    com.google.protobuf.ByteString
        getCodecBytes();

    // repeated .hadoop.hdfs.fsimage.FileSummary.Section sections = 4;
    /**
     * <code>repeated .hadoop.hdfs.fsimage.FileSummary.Section sections = 4;</code>
     */
    java.util.List<org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section> 
        getSectionsList();
    /**
     * <code>repeated .hadoop.hdfs.fsimage.FileSummary.Section sections = 4;</code>
     */
    org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section getSections(int index);
    /**
     * <code>repeated .hadoop.hdfs.fsimage.FileSummary.Section sections = 4;</code>
     */
    int getSectionsCount();
    /**
     * <code>repeated .hadoop.hdfs.fsimage.FileSummary.Section sections = 4;</code>
     */
    java.util.List<? extends org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.SectionOrBuilder> 
        getSectionsOrBuilderList();
    /**
     * <code>repeated .hadoop.hdfs.fsimage.FileSummary.Section sections = 4;</code>
     */
    org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.SectionOrBuilder getSectionsOrBuilder(
        int index);
  }
092      /**
093       * Protobuf type {@code hadoop.hdfs.fsimage.FileSummary}
094       */
095      public static final class FileSummary extends
096          com.google.protobuf.GeneratedMessage
097          implements FileSummaryOrBuilder {
098        // Use FileSummary.newBuilder() to construct.
    // Use FileSummary.newBuilder() to construct.
    private FileSummary(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Used only to create the singleton default instance; presumably the
    // static initializer (not visible in this chunk) applies initFields() —
    // confirm against the full generated file.
    private FileSummary(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    // Shared all-fields-unset instance; assigned outside this chunk.
    private static final FileSummary defaultInstance;
    /** Returns the immutable default (all-fields-unset) instance. */
    public static FileSummary getDefaultInstance() {
      return defaultInstance;
    }

    /** Same object as {@link #getDefaultInstance()}; required by the Message contract. */
    public FileSummary getDefaultInstanceForType() {
      return defaultInstance;
    }

    // Fields not recognized at parse time are preserved here for round-tripping.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    /*
     * Wire-format parsing constructor, invoked only via PARSER. Reads tags
     * until EOF (tag 0); on a malformed stream the partially-read message is
     * attached to the thrown InvalidProtocolBufferException via
     * setUnfinishedMessage() so callers can inspect what was decoded.
     */
    private FileSummary(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          // Note: the default label precedes the specific cases below; this is
          // legal Java — exact case labels still take priority over default.
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 8: {
              // tag 8 = field 1 (ondiskVersion), wire type 0 (varint)
              bitField0_ |= 0x00000001;
              ondiskVersion_ = input.readUInt32();
              break;
            }
            case 16: {
              // tag 16 = field 2 (layoutVersion), wire type 0 (varint)
              bitField0_ |= 0x00000002;
              layoutVersion_ = input.readUInt32();
              break;
            }
            case 26: {
              // tag 26 = field 3 (codec), wire type 2 (length-delimited);
              // stored as ByteString and lazily decoded by getCodec()
              bitField0_ |= 0x00000004;
              codec_ = input.readBytes();
              break;
            }
            case 34: {
              // tag 34 = field 4 (sections), wire type 2; repeated, so the
              // backing list is created on first occurrence only
              if (!((mutable_bitField0_ & 0x00000008) == 0x00000008)) {
                sections_ = new java.util.ArrayList<org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section>();
                mutable_bitField0_ |= 0x00000008;
              }
              sections_.add(input.readMessage(org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section.PARSER, extensionRegistry));
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Runs even on exception so the unfinished message is consistent:
        // freeze the repeated field and the collected unknown fields.
        if (((mutable_bitField0_ & 0x00000008) == 0x00000008)) {
          sections_ = java.util.Collections.unmodifiableList(sections_);
        }
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    /** Returns the proto descriptor for {@code hadoop.hdfs.fsimage.FileSummary}. */
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_FileSummary_descriptor;
    }

    // Binds the generated reflection accessor table to this message class and
    // its Builder so GeneratedMessage can service reflective field access.
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_FileSummary_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Builder.class);
    }
192    
193        public static com.google.protobuf.Parser<FileSummary> PARSER =
194            new com.google.protobuf.AbstractParser<FileSummary>() {
195          public FileSummary parsePartialFrom(
196              com.google.protobuf.CodedInputStream input,
197              com.google.protobuf.ExtensionRegistryLite extensionRegistry)
198              throws com.google.protobuf.InvalidProtocolBufferException {
199            return new FileSummary(input, extensionRegistry);
200          }
201        };
202    
203        @java.lang.Override
204        public com.google.protobuf.Parser<FileSummary> getParserForType() {
205          return PARSER;
206        }
207    
    /**
     * Read-only accessor interface for
     * {@code hadoop.hdfs.fsimage.FileSummary.Section} messages; implemented by
     * both the immutable message and its Builder.
     */
    public interface SectionOrBuilder
        extends com.google.protobuf.MessageOrBuilder {

      // optional string name = 1;
      /**
       * <code>optional string name = 1;</code>
       */
      boolean hasName();
      /**
       * <code>optional string name = 1;</code>
       */
      java.lang.String getName();
      /**
       * <code>optional string name = 1;</code>
       */
      com.google.protobuf.ByteString
          getNameBytes();

      // optional uint64 length = 2;
      /**
       * <code>optional uint64 length = 2;</code>
       */
      boolean hasLength();
      /**
       * <code>optional uint64 length = 2;</code>
       */
      long getLength();

      // optional uint64 offset = 3;
      /**
       * <code>optional uint64 offset = 3;</code>
       */
      boolean hasOffset();
      /**
       * <code>optional uint64 offset = 3;</code>
       */
      long getOffset();
    }
246        /**
247         * Protobuf type {@code hadoop.hdfs.fsimage.FileSummary.Section}
248         *
249         * <pre>
250         * index for each section
251         * </pre>
252         */
253        public static final class Section extends
254            com.google.protobuf.GeneratedMessage
255            implements SectionOrBuilder {
      // Use Section.newBuilder() to construct.
      private Section(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
        super(builder);
        this.unknownFields = builder.getUnknownFields();
      }
      // Used only to create the singleton default instance; presumably the
      // static initializer (outside this chunk) applies initFields().
      private Section(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
262    
      // Shared all-fields-unset instance; assigned outside this chunk.
      private static final Section defaultInstance;
      /** Returns the immutable default (all-fields-unset) instance. */
      public static Section getDefaultInstance() {
        return defaultInstance;
      }

      /** Same object as {@link #getDefaultInstance()}; required by the Message contract. */
      public Section getDefaultInstanceForType() {
        return defaultInstance;
      }

      // Fields not recognized at parse time are preserved here for round-tripping.
      private final com.google.protobuf.UnknownFieldSet unknownFields;
      @java.lang.Override
      public final com.google.protobuf.UnknownFieldSet
          getUnknownFields() {
        return this.unknownFields;
      }
      /*
       * Wire-format parsing constructor, invoked only via PARSER. Reads tags
       * until EOF (tag 0); on failure the partially-read message is attached
       * to the thrown InvalidProtocolBufferException.
       */
      private Section(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        initFields();
        int mutable_bitField0_ = 0;
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
            com.google.protobuf.UnknownFieldSet.newBuilder();
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            // default precedes the specific cases; legal Java — exact labels win.
            switch (tag) {
              case 0:
                done = true;
                break;
              default: {
                if (!parseUnknownField(input, unknownFields,
                                       extensionRegistry, tag)) {
                  done = true;
                }
                break;
              }
              case 10: {
                // tag 10 = field 1 (name), wire type 2; kept as ByteString,
                // decoded lazily by getName()
                bitField0_ |= 0x00000001;
                name_ = input.readBytes();
                break;
              }
              case 16: {
                // tag 16 = field 2 (length), wire type 0 (varint)
                bitField0_ |= 0x00000002;
                length_ = input.readUInt64();
                break;
              }
              case 24: {
                // tag 24 = field 3 (offset), wire type 0 (varint)
                bitField0_ |= 0x00000004;
                offset_ = input.readUInt64();
                break;
              }
            }
          }
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(this);
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(
              e.getMessage()).setUnfinishedMessage(this);
        } finally {
          // Always freeze collected unknown fields, even on failure.
          this.unknownFields = unknownFields.build();
          makeExtensionsImmutable();
        }
      }
      /** Returns the proto descriptor for {@code hadoop.hdfs.fsimage.FileSummary.Section}. */
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_FileSummary_Section_descriptor;
      }

      // Binds the generated reflection accessor table to this message class
      // and its Builder.
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_FileSummary_Section_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section.Builder.class);
      }
339    
340          public static com.google.protobuf.Parser<Section> PARSER =
341              new com.google.protobuf.AbstractParser<Section>() {
342            public Section parsePartialFrom(
343                com.google.protobuf.CodedInputStream input,
344                com.google.protobuf.ExtensionRegistryLite extensionRegistry)
345                throws com.google.protobuf.InvalidProtocolBufferException {
346              return new Section(input, extensionRegistry);
347            }
348          };
349    
350          @java.lang.Override
351          public com.google.protobuf.Parser<Section> getParserForType() {
352            return PARSER;
353          }
354    
      private int bitField0_;  // presence bits: 0x1=name, 0x2=length, 0x4=offset
      // optional string name = 1;
      public static final int NAME_FIELD_NUMBER = 1;
      // Holds either a java.lang.String or a ByteString; parsing stores the
      // raw ByteString and getName() decodes (and may cache) the String form.
      private java.lang.Object name_;
      /**
       * <code>optional string name = 1;</code>
       */
      public boolean hasName() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>optional string name = 1;</code>
       */
      public java.lang.String getName() {
        java.lang.Object ref = name_;
        if (ref instanceof java.lang.String) {
          return (java.lang.String) ref;
        } else {
          com.google.protobuf.ByteString bs = 
              (com.google.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          // Cache the decoded String only when the bytes are valid UTF-8, so
          // getNameBytes() can still return the original bytes otherwise.
          if (bs.isValidUtf8()) {
            name_ = s;
          }
          return s;
        }
      }
      /**
       * <code>optional string name = 1;</code>
       */
      public com.google.protobuf.ByteString
          getNameBytes() {
        java.lang.Object ref = name_;
        if (ref instanceof java.lang.String) {
          // Cache the encoded form so repeated calls don't re-encode.
          com.google.protobuf.ByteString b = 
              com.google.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          name_ = b;
          return b;
        } else {
          return (com.google.protobuf.ByteString) ref;
        }
      }
398    
      // optional uint64 length = 2;
      public static final int LENGTH_FIELD_NUMBER = 2;
      private long length_;
      /**
       * <code>optional uint64 length = 2;</code>
       */
      public boolean hasLength() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>optional uint64 length = 2;</code>
       */
      public long getLength() {
        return length_;
      }

      // optional uint64 offset = 3;
      public static final int OFFSET_FIELD_NUMBER = 3;
      private long offset_;
      /**
       * <code>optional uint64 offset = 3;</code>
       */
      public boolean hasOffset() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      /**
       * <code>optional uint64 offset = 3;</code>
       */
      public long getOffset() {
        return offset_;
      }
430    
      // Resets all fields to their proto defaults; called before parsing.
      private void initFields() {
        name_ = "";
        length_ = 0L;
        offset_ = 0L;
      }
      // Memoized tri-state: -1 unknown, 0 false, 1 true.
      private byte memoizedIsInitialized = -1;
      /** Always true: Section has no required fields. */
      public final boolean isInitialized() {
        byte isInitialized = memoizedIsInitialized;
        if (isInitialized != -1) return isInitialized == 1;

        memoizedIsInitialized = 1;
        return true;
      }
444    
      /**
       * Serializes this message to {@code output} in field-number order,
       * writing only fields whose presence bit is set, then appends any
       * unknown fields preserved from parsing.
       */
      public void writeTo(com.google.protobuf.CodedOutputStream output)
                          throws java.io.IOException {
        getSerializedSize();  // ensures the memoized size is computed first
        if (((bitField0_ & 0x00000001) == 0x00000001)) {
          output.writeBytes(1, getNameBytes());
        }
        if (((bitField0_ & 0x00000002) == 0x00000002)) {
          output.writeUInt64(2, length_);
        }
        if (((bitField0_ & 0x00000004) == 0x00000004)) {
          output.writeUInt64(3, offset_);
        }
        getUnknownFields().writeTo(output);
      }
459    
      // Memoized wire size; -1 means "not computed yet". Safe to cache
      // because the message is immutable after construction.
      private int memoizedSerializedSize = -1;
      /** Returns the serialized byte size of this message, computing it once. */
      public int getSerializedSize() {
        int size = memoizedSerializedSize;
        if (size != -1) return size;

        size = 0;
        if (((bitField0_ & 0x00000001) == 0x00000001)) {
          size += com.google.protobuf.CodedOutputStream
            .computeBytesSize(1, getNameBytes());
        }
        if (((bitField0_ & 0x00000002) == 0x00000002)) {
          size += com.google.protobuf.CodedOutputStream
            .computeUInt64Size(2, length_);
        }
        if (((bitField0_ & 0x00000004) == 0x00000004)) {
          size += com.google.protobuf.CodedOutputStream
            .computeUInt64Size(3, offset_);
        }
        size += getUnknownFields().getSerializedSize();
        memoizedSerializedSize = size;
        return size;
      }
482    
      private static final long serialVersionUID = 0L;
      // Java serialization hook; defers to the superclass implementation
      // (GeneratedMessage) rather than serializing fields directly.
      @java.lang.Override
      protected java.lang.Object writeReplace()
          throws java.io.ObjectStreamException {
        return super.writeReplace();
      }
489    
      // Static parse entry points for every common input source; all delegate
      // to PARSER. The parseDelimitedFrom variants read a varint length prefix
      // before the message body.
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section parseFrom(
          com.google.protobuf.ByteString data)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section parseFrom(
          com.google.protobuf.ByteString data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section parseFrom(byte[] data)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section parseFrom(
          byte[] data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section parseFrom(java.io.InputStream input)
          throws java.io.IOException {
        return PARSER.parseFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section parseFrom(
          java.io.InputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseFrom(input, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section parseDelimitedFrom(java.io.InputStream input)
          throws java.io.IOException {
        return PARSER.parseDelimitedFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section parseDelimitedFrom(
          java.io.InputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseDelimitedFrom(input, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section parseFrom(
          com.google.protobuf.CodedInputStream input)
          throws java.io.IOException {
        return PARSER.parseFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section parseFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseFrom(input, extensionRegistry);
      }
542    
      /** Creates a new, empty builder for {@code Section}. */
      public static Builder newBuilder() { return Builder.create(); }
      public Builder newBuilderForType() { return newBuilder(); }
      /** Creates a builder pre-populated with {@code prototype}'s fields. */
      public static Builder newBuilder(org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section prototype) {
        return newBuilder().mergeFrom(prototype);
      }
      /** Returns a builder initialized with this message's current fields. */
      public Builder toBuilder() { return newBuilder(this); }

      // Creates a child builder wired to a parent for change notification.
      @java.lang.Override
      protected Builder newBuilderForType(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        Builder builder = new Builder(parent);
        return builder;
      }
556          /**
557           * Protobuf type {@code hadoop.hdfs.fsimage.FileSummary.Section}
558           *
559           * <pre>
560           * index for each section
561           * </pre>
562           */
563          public static final class Builder extends
564              com.google.protobuf.GeneratedMessage.Builder<Builder>
565             implements org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.SectionOrBuilder {
        /** Returns the proto descriptor for {@code hadoop.hdfs.fsimage.FileSummary.Section}. */
        public static final com.google.protobuf.Descriptors.Descriptor
            getDescriptor() {
          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_FileSummary_Section_descriptor;
        }

        // Same reflection table binding as the message class; builders share it.
        protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
            internalGetFieldAccessorTable() {
          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_FileSummary_Section_fieldAccessorTable
              .ensureFieldAccessorsInitialized(
                  org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section.Builder.class);
        }
577    
        // Construct using org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section.newBuilder()
        private Builder() {
          maybeForceBuilderInitialization();
        }

        // Parent-aware constructor used for nested-builder change propagation.
        private Builder(
            com.google.protobuf.GeneratedMessage.BuilderParent parent) {
          super(parent);
          maybeForceBuilderInitialization();
        }
        // No sub-message fields in Section, so nothing to eagerly initialize;
        // the empty branch is kept for structural parity with other builders.
        private void maybeForceBuilderInitialization() {
          if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          }
        }
        private static Builder create() {
          return new Builder();
        }
595    
        /** Resets every field to its proto default and clears all presence bits. */
        public Builder clear() {
          super.clear();
          name_ = "";
          bitField0_ = (bitField0_ & ~0x00000001);
          length_ = 0L;
          bitField0_ = (bitField0_ & ~0x00000002);
          offset_ = 0L;
          bitField0_ = (bitField0_ & ~0x00000004);
          return this;
        }

        /** Returns an independent copy of this builder's current state. */
        public Builder clone() {
          return create().mergeFrom(buildPartial());
        }
610    
        /** Descriptor of the message type this builder produces. */
        public com.google.protobuf.Descriptors.Descriptor
            getDescriptorForType() {
          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_FileSummary_Section_descriptor;
        }

        /** Default (all-fields-unset) instance of the produced message type. */
        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section getDefaultInstanceForType() {
          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section.getDefaultInstance();
        }
619    
        /**
         * Builds the message, throwing if it is not fully initialized.
         * (Section has no required fields, so this never throws in practice.)
         */
        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section build() {
          org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section result = buildPartial();
          if (!result.isInitialized()) {
            throw newUninitializedMessageException(result);
          }
          return result;
        }

        /**
         * Builds the message without the initialization check, copying each
         * field value and translating the builder's presence bits into the
         * message's bitField0_.
         */
        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section buildPartial() {
          org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section result = new org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section(this);
          int from_bitField0_ = bitField0_;
          int to_bitField0_ = 0;
          if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
            to_bitField0_ |= 0x00000001;
          }
          result.name_ = name_;
          if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
            to_bitField0_ |= 0x00000002;
          }
          result.length_ = length_;
          if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
            to_bitField0_ |= 0x00000004;
          }
          result.offset_ = offset_;
          result.bitField0_ = to_bitField0_;
          onBuilt();
          return result;
        }
648    
        /** Dispatches to the typed merge when possible, else the generic one. */
        public Builder mergeFrom(com.google.protobuf.Message other) {
          if (other instanceof org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section) {
            return mergeFrom((org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section)other);
          } else {
            super.mergeFrom(other);
            return this;
          }
        }

        /**
         * Copies every field that is set on {@code other} into this builder,
         * overwriting existing values; unset fields in {@code other} are
         * left untouched here. Unknown fields are merged as well.
         */
        public Builder mergeFrom(org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section other) {
          if (other == org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section.getDefaultInstance()) return this;
          if (other.hasName()) {
            // Copies the String-or-ByteString reference directly to avoid a
            // decode/re-encode round trip.
            bitField0_ |= 0x00000001;
            name_ = other.name_;
            onChanged();
          }
          if (other.hasLength()) {
            setLength(other.getLength());
          }
          if (other.hasOffset()) {
            setOffset(other.getOffset());
          }
          this.mergeUnknownFields(other.getUnknownFields());
          return this;
        }
674    
        /** Always true: Section has no required fields. */
        public final boolean isInitialized() {
          return true;
        }

        /**
         * Parses from a stream and merges the result into this builder. On
         * parse failure the partially-parsed message (attached to the
         * exception) is still merged in the finally block before rethrowing,
         * matching protobuf's partial-merge semantics.
         */
        public Builder mergeFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws java.io.IOException {
          org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section parsedMessage = null;
          try {
            parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            parsedMessage = (org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section) e.getUnfinishedMessage();
            throw e;
          } finally {
            if (parsedMessage != null) {
              mergeFrom(parsedMessage);
            }
          }
          return this;
        }
        private int bitField0_;  // presence bits: 0x1=name, 0x2=length, 0x4=offset

        // optional string name = 1;
        // Holds either a java.lang.String or a ByteString (same lazy-decoding
        // scheme as the message class).
        private java.lang.Object name_ = "";
        /**
         * <code>optional string name = 1;</code>
         */
        public boolean hasName() {
          return ((bitField0_ & 0x00000001) == 0x00000001);
        }
        /**
         * <code>optional string name = 1;</code>
         */
        public java.lang.String getName() {
          java.lang.Object ref = name_;
          if (!(ref instanceof java.lang.String)) {
            // Unlike the message's getName(), the builder caches the decoded
            // String unconditionally (no isValidUtf8 check).
            java.lang.String s = ((com.google.protobuf.ByteString) ref)
                .toStringUtf8();
            name_ = s;
            return s;
          } else {
            return (java.lang.String) ref;
          }
        }
        /**
         * <code>optional string name = 1;</code>
         */
        public com.google.protobuf.ByteString
            getNameBytes() {
          java.lang.Object ref = name_;
          if (ref instanceof String) {
            com.google.protobuf.ByteString b = 
                com.google.protobuf.ByteString.copyFromUtf8(
                    (java.lang.String) ref);
            name_ = b;
            return b;
          } else {
            return (com.google.protobuf.ByteString) ref;
          }
        }
        /**
         * <code>optional string name = 1;</code>
         */
        // NOTE(review): the misaligned indentation below is verbatim protoc
        // 2.5 output; left untouched in generated code.
        public Builder setName(
            java.lang.String value) {
          if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000001;
          name_ = value;
          onChanged();
          return this;
        }
        /**
         * <code>optional string name = 1;</code>
         */
        public Builder clearName() {
          bitField0_ = (bitField0_ & ~0x00000001);
          name_ = getDefaultInstance().getName();
          onChanged();
          return this;
        }
        /**
         * <code>optional string name = 1;</code>
         */
        public Builder setNameBytes(
            com.google.protobuf.ByteString value) {
          if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000001;
          name_ = value;
          onChanged();
          return this;
        }
771    
        // optional uint64 length = 2;
        // Length in bytes of this section within the fsimage file.
        private long length_ ;
        /**
         * <code>optional uint64 length = 2;</code>
         *
         * Returns true if the length field has been explicitly set.
         */
        public boolean hasLength() {
          return ((bitField0_ & 0x00000002) == 0x00000002);
        }
        /**
         * <code>optional uint64 length = 2;</code>
         */
        public long getLength() {
          return length_;
        }
        /**
         * <code>optional uint64 length = 2;</code>
         *
         * Sets the length and marks the field present.
         */
        public Builder setLength(long value) {
          bitField0_ |= 0x00000002;
          length_ = value;
          onChanged();
          return this;
        }
        /**
         * <code>optional uint64 length = 2;</code>
         *
         * Clears the presence bit and restores the default (0).
         */
        public Builder clearLength() {
          bitField0_ = (bitField0_ & ~0x00000002);
          length_ = 0L;
          onChanged();
          return this;
        }
804    
        // optional uint64 offset = 3;
        // Byte offset of this section within the fsimage file.
        private long offset_ ;
        /**
         * <code>optional uint64 offset = 3;</code>
         *
         * Returns true if the offset field has been explicitly set.
         */
        public boolean hasOffset() {
          return ((bitField0_ & 0x00000004) == 0x00000004);
        }
        /**
         * <code>optional uint64 offset = 3;</code>
         */
        public long getOffset() {
          return offset_;
        }
        /**
         * <code>optional uint64 offset = 3;</code>
         *
         * Sets the offset and marks the field present.
         */
        public Builder setOffset(long value) {
          bitField0_ |= 0x00000004;
          offset_ = value;
          onChanged();
          return this;
        }
        /**
         * <code>optional uint64 offset = 3;</code>
         *
         * Clears the presence bit and restores the default (0).
         */
        public Builder clearOffset() {
          bitField0_ = (bitField0_ & ~0x00000004);
          offset_ = 0L;
          onChanged();
          return this;
        }
837    
838            // @@protoc_insertion_point(builder_scope:hadoop.hdfs.fsimage.FileSummary.Section)
839          }
840    
      // Eagerly build the singleton default instance of Section; the 'true'
      // constructor argument marks it as the default (no-op) instance.
      static {
        defaultInstance = new Section(true);
        defaultInstance.initFields();
      }
845    
846          // @@protoc_insertion_point(class_scope:hadoop.hdfs.fsimage.FileSummary.Section)
847        }
848    
    // Bits recording which singular fields were present on the wire
    // (bit 0x1 = ondiskVersion, 0x2 = layoutVersion, 0x4 = codec).
    private int bitField0_;
    // required uint32 ondiskVersion = 1;
    public static final int ONDISKVERSION_FIELD_NUMBER = 1;
    private int ondiskVersion_;
    /**
     * <code>required uint32 ondiskVersion = 1;</code>
     *
     * <pre>
     * The version of the above EBNF grammars.
     * </pre>
     */
    public boolean hasOndiskVersion() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required uint32 ondiskVersion = 1;</code>
     *
     * <pre>
     * The version of the above EBNF grammars.
     * </pre>
     */
    public int getOndiskVersion() {
      return ondiskVersion_;
    }

    // required uint32 layoutVersion = 2;
    public static final int LAYOUTVERSION_FIELD_NUMBER = 2;
    private int layoutVersion_;
    /**
     * <code>required uint32 layoutVersion = 2;</code>
     *
     * <pre>
     * layoutVersion describes which features are available in the
     * FSImage.
     * </pre>
     */
    public boolean hasLayoutVersion() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>required uint32 layoutVersion = 2;</code>
     *
     * <pre>
     * layoutVersion describes which features are available in the
     * FSImage.
     * </pre>
     */
    public int getLayoutVersion() {
      return layoutVersion_;
    }
899    
    // optional string codec = 3;
    public static final int CODEC_FIELD_NUMBER = 3;
    // Holds either a java.lang.String or a ByteString; converted lazily.
    private java.lang.Object codec_;
    /**
     * <code>optional string codec = 3;</code>
     *
     * Returns true if the codec field was present on the wire.
     */
    public boolean hasCodec() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    /**
     * <code>optional string codec = 3;</code>
     *
     * Returns the codec name, lazily decoding the wire bytes.
     */
    public java.lang.String getCodec() {
      java.lang.Object ref = codec_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        com.google.protobuf.ByteString bs = 
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        // Cache the decoded String only when the bytes are valid UTF-8, so
        // invalid bytes still round-trip losslessly through getCodecBytes().
        if (bs.isValidUtf8()) {
          codec_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string codec = 3;</code>
     *
     * Returns the codec as UTF-8 bytes, encoding and caching on first access.
     */
    public com.google.protobuf.ByteString
        getCodecBytes() {
      java.lang.Object ref = codec_;
      if (ref instanceof java.lang.String) {
        com.google.protobuf.ByteString b = 
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        codec_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
942    
    // repeated .hadoop.hdfs.fsimage.FileSummary.Section sections = 4;
    public static final int SECTIONS_FIELD_NUMBER = 4;
    // Immutable once the message is built; see Builder.buildPartial().
    private java.util.List<org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section> sections_;
    /**
     * <code>repeated .hadoop.hdfs.fsimage.FileSummary.Section sections = 4;</code>
     */
    public java.util.List<org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section> getSectionsList() {
      return sections_;
    }
    /**
     * <code>repeated .hadoop.hdfs.fsimage.FileSummary.Section sections = 4;</code>
     */
    public java.util.List<? extends org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.SectionOrBuilder> 
        getSectionsOrBuilderList() {
      return sections_;
    }
    /**
     * <code>repeated .hadoop.hdfs.fsimage.FileSummary.Section sections = 4;</code>
     */
    public int getSectionsCount() {
      return sections_.size();
    }
    /**
     * <code>repeated .hadoop.hdfs.fsimage.FileSummary.Section sections = 4;</code>
     */
    public org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section getSections(int index) {
      return sections_.get(index);
    }
    /**
     * <code>repeated .hadoop.hdfs.fsimage.FileSummary.Section sections = 4;</code>
     */
    public org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.SectionOrBuilder getSectionsOrBuilder(
        int index) {
      return sections_.get(index);
    }
978    
    // Resets every field to its proto default value.
    private void initFields() {
      ondiskVersion_ = 0;
      layoutVersion_ = 0;
      codec_ = "";
      sections_ = java.util.Collections.emptyList();
    }
    // Memoized result of isInitialized(): -1 = not yet computed, 0 = false, 1 = true.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      // Both required fields must be present for the message to be valid.
      if (!hasOndiskVersion()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasLayoutVersion()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }
1001    
    // Serializes set fields (by tag number) followed by unknown fields.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      // Force size computation up front so nested message sizes are memoized
      // before writing length-delimited fields.
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeUInt32(1, ondiskVersion_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeUInt32(2, layoutVersion_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeBytes(3, getCodecBytes());
      }
      for (int i = 0; i < sections_.size(); i++) {
        output.writeMessage(4, sections_.get(i));
      }
      getUnknownFields().writeTo(output);
    }
1019    
    // Memoized wire size: -1 = not yet computed.
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      // Sum the encoded size of each present field plus unknown fields,
      // mirroring the field order in writeTo().
      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt32Size(1, ondiskVersion_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt32Size(2, layoutVersion_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(3, getCodecBytes());
      }
      for (int i = 0; i < sections_.size(); i++) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(4, sections_.get(i));
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
1046    
    private static final long serialVersionUID = 0L;
    // Java serialization hook; delegates to GeneratedMessage's replacement form.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
1053    
    // Static parsing entry points; all delegate to the shared PARSER instance.
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    // Delimited variants read a leading varint length before the message body.
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
1106    
    // Builder factory methods.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    // Returns a fresh builder pre-populated from the given message.
    public static Builder newBuilder(org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    // Framework hook: creates a child builder attached to a parent for
    // change-notification propagation.
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
1120        /**
1121         * Protobuf type {@code hadoop.hdfs.fsimage.FileSummary}
1122         */
1123        public static final class Builder extends
1124            com.google.protobuf.GeneratedMessage.Builder<Builder>
1125           implements org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummaryOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_FileSummary_descriptor;
      }

      // Reflection support: maps descriptor fields to generated accessors.
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_FileSummary_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Builder.class);
      }

      // Construct using org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // Eagerly creates nested field builders when the runtime requires it
      // (i.e. when alwaysUseFieldBuilders is enabled, e.g. for reflection).
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getSectionsFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }
1156    
      // Resets all four fields to their defaults and clears their presence bits.
      public Builder clear() {
        super.clear();
        ondiskVersion_ = 0;
        bitField0_ = (bitField0_ & ~0x00000001);
        layoutVersion_ = 0;
        bitField0_ = (bitField0_ & ~0x00000002);
        codec_ = "";
        bitField0_ = (bitField0_ & ~0x00000004);
        if (sectionsBuilder_ == null) {
          // No field builder in use: drop the list directly.
          sections_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000008);
        } else {
          sectionsBuilder_.clear();
        }
        return this;
      }

      // Deep copy via build-and-merge of the current (possibly partial) state.
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
1177    
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_FileSummary_descriptor;
      }

      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary getDefaultInstanceForType() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.getDefaultInstance();
      }

      // Builds the message, failing if any required field is missing.
      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary build() {
        org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
1194    
      // Builds the message without enforcing required fields; copies builder
      // state into the result, translating builder presence bits into the
      // message's bit layout.
      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary buildPartial() {
        org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary result = new org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.ondiskVersion_ = ondiskVersion_;
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.layoutVersion_ = layoutVersion_;
        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
          to_bitField0_ |= 0x00000004;
        }
        result.codec_ = codec_;
        if (sectionsBuilder_ == null) {
          // Freeze the inline list and hand it to the message; clearing the
          // mutability bit means later builder edits will copy-on-write.
          if (((bitField0_ & 0x00000008) == 0x00000008)) {
            sections_ = java.util.Collections.unmodifiableList(sections_);
            bitField0_ = (bitField0_ & ~0x00000008);
          }
          result.sections_ = sections_;
        } else {
          result.sections_ = sectionsBuilder_.build();
        }
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
1224    
      // Generic merge: dispatches to the typed overload when possible.
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary) {
          return mergeFrom((org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      // Field-by-field merge: set fields in 'other' overwrite this builder's
      // singular fields; repeated sections are appended.
      public Builder mergeFrom(org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary other) {
        if (other == org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.getDefaultInstance()) return this;
        if (other.hasOndiskVersion()) {
          setOndiskVersion(other.getOndiskVersion());
        }
        if (other.hasLayoutVersion()) {
          setLayoutVersion(other.getLayoutVersion());
        }
        if (other.hasCodec()) {
          // Copy the raw Object (String or ByteString) to avoid forcing a decode.
          bitField0_ |= 0x00000004;
          codec_ = other.codec_;
          onChanged();
        }
        if (sectionsBuilder_ == null) {
          if (!other.sections_.isEmpty()) {
            if (sections_.isEmpty()) {
              // Share other's immutable list until a mutation forces a copy.
              sections_ = other.sections_;
              bitField0_ = (bitField0_ & ~0x00000008);
            } else {
              ensureSectionsIsMutable();
              sections_.addAll(other.sections_);
            }
            onChanged();
          }
        } else {
          if (!other.sections_.isEmpty()) {
            if (sectionsBuilder_.isEmpty()) {
              // Discard the empty field builder and adopt other's list;
              // recreate the builder only if the runtime requires it.
              sectionsBuilder_.dispose();
              sectionsBuilder_ = null;
              sections_ = other.sections_;
              bitField0_ = (bitField0_ & ~0x00000008);
              sectionsBuilder_ = 
                com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
                   getSectionsFieldBuilder() : null;
            } else {
              sectionsBuilder_.addAllMessages(other.sections_);
            }
          }
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
1276    
      // A FileSummary is valid only when both required fields are set.
      public final boolean isInitialized() {
        if (!hasOndiskVersion()) {
          
          return false;
        }
        if (!hasLayoutVersion()) {
          
          return false;
        }
        return true;
      }
1288    
      // Parses a FileSummary from the stream and merges it into this builder.
      // On InvalidProtocolBufferException the partially-parsed message is still
      // merged (via the finally block) before the exception propagates.
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      // Bits tracking which builder fields have been set (0x1 = ondiskVersion,
      // 0x2 = layoutVersion, 0x4 = codec, 0x8 = sections list is mutable).
      private int bitField0_;

      // required uint32 ondiskVersion = 1;
      private int ondiskVersion_ ;
      /**
       * <code>required uint32 ondiskVersion = 1;</code>
       *
       * <pre>
       * The version of the above EBNF grammars.
       * </pre>
       */
      public boolean hasOndiskVersion() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>required uint32 ondiskVersion = 1;</code>
       *
       * <pre>
       * The version of the above EBNF grammars.
       * </pre>
       */
      public int getOndiskVersion() {
        return ondiskVersion_;
      }
      /**
       * <code>required uint32 ondiskVersion = 1;</code>
       *
       * <pre>
       * The version of the above EBNF grammars.
       * </pre>
       */
      public Builder setOndiskVersion(int value) {
        bitField0_ |= 0x00000001;
        ondiskVersion_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>required uint32 ondiskVersion = 1;</code>
       *
       * <pre>
       * The version of the above EBNF grammars.
       * </pre>
       */
      public Builder clearOndiskVersion() {
        bitField0_ = (bitField0_ & ~0x00000001);
        ondiskVersion_ = 0;
        onChanged();
        return this;
      }
1356    
      // required uint32 layoutVersion = 2;
      private int layoutVersion_ ;
      /**
       * <code>required uint32 layoutVersion = 2;</code>
       *
       * <pre>
       * layoutVersion describes which features are available in the
       * FSImage.
       * </pre>
       */
      public boolean hasLayoutVersion() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>required uint32 layoutVersion = 2;</code>
       *
       * <pre>
       * layoutVersion describes which features are available in the
       * FSImage.
       * </pre>
       */
      public int getLayoutVersion() {
        return layoutVersion_;
      }
      /**
       * <code>required uint32 layoutVersion = 2;</code>
       *
       * <pre>
       * layoutVersion describes which features are available in the
       * FSImage.
       * </pre>
       */
      public Builder setLayoutVersion(int value) {
        bitField0_ |= 0x00000002;
        layoutVersion_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>required uint32 layoutVersion = 2;</code>
       *
       * <pre>
       * layoutVersion describes which features are available in the
       * FSImage.
       * </pre>
       */
      public Builder clearLayoutVersion() {
        bitField0_ = (bitField0_ & ~0x00000002);
        layoutVersion_ = 0;
        onChanged();
        return this;
      }
1409    
      // optional string codec = 3;
      // Holds either a java.lang.String or a ByteString; converted lazily and
      // the converted form cached back into the field.
      private java.lang.Object codec_ = "";
      /**
       * <code>optional string codec = 3;</code>
       *
       * Returns true if the codec field has been explicitly set.
       */
      public boolean hasCodec() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      /**
       * <code>optional string codec = 3;</code>
       *
       * Returns the codec, decoding and caching the UTF-8 bytes on first access.
       */
      public java.lang.String getCodec() {
        java.lang.Object ref = codec_;
        if (!(ref instanceof java.lang.String)) {
          // Field currently holds raw bytes (set via setCodecBytes): decode and cache.
          java.lang.String s = ((com.google.protobuf.ByteString) ref)
              .toStringUtf8();
          codec_ = s;
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string codec = 3;</code>
       *
       * Returns the codec as UTF-8 bytes, encoding and caching on first access.
       */
      public com.google.protobuf.ByteString
          getCodecBytes() {
        java.lang.Object ref = codec_;
        if (ref instanceof String) {
          com.google.protobuf.ByteString b = 
              com.google.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          codec_ = b;
          return b;
        } else {
          return (com.google.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string codec = 3;</code>
       *
       * Sets the codec; rejects null. (The irregular indentation below is
       * verbatim protoc output — do not reformat generated code.)
       */
      public Builder setCodec(
          java.lang.String value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000004;
        codec_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional string codec = 3;</code>
       *
       * Clears the presence bit and restores the default ("").
       */
      public Builder clearCodec() {
        bitField0_ = (bitField0_ & ~0x00000004);
        codec_ = getDefaultInstance().getCodec();
        onChanged();
        return this;
      }
      /**
       * <code>optional string codec = 3;</code>
       *
       * Sets the codec from raw bytes without UTF-8 validation.
       */
      public Builder setCodecBytes(
          com.google.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000004;
        codec_ = value;
        onChanged();
        return this;
      }
1483    
      // repeated .hadoop.hdfs.fsimage.FileSummary.Section sections = 4;
      // Inline storage for sections when no field builder is in use; may alias
      // an immutable list from a merged message until a mutation copies it.
      private java.util.List<org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section> sections_ =
        java.util.Collections.emptyList();
      // Copy-on-write: bit 0x8 marks the list as privately owned and mutable.
      private void ensureSectionsIsMutable() {
        if (!((bitField0_ & 0x00000008) == 0x00000008)) {
          sections_ = new java.util.ArrayList<org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section>(sections_);
          bitField0_ |= 0x00000008;
         }
      }

      // Lazily-created field builder; when non-null it owns the list and
      // sections_ is ignored.
      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section, org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.SectionOrBuilder> sectionsBuilder_;

      /**
       * <code>repeated .hadoop.hdfs.fsimage.FileSummary.Section sections = 4;</code>
       */
      public java.util.List<org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section> getSectionsList() {
        if (sectionsBuilder_ == null) {
          return java.util.Collections.unmodifiableList(sections_);
        } else {
          return sectionsBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .hadoop.hdfs.fsimage.FileSummary.Section sections = 4;</code>
       */
      public int getSectionsCount() {
        if (sectionsBuilder_ == null) {
          return sections_.size();
        } else {
          return sectionsBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .hadoop.hdfs.fsimage.FileSummary.Section sections = 4;</code>
       */
      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section getSections(int index) {
        if (sectionsBuilder_ == null) {
          return sections_.get(index);
        } else {
          return sectionsBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .hadoop.hdfs.fsimage.FileSummary.Section sections = 4;</code>
       *
       * Replaces the element at {@code index}; rejects null.
       */
      public Builder setSections(
          int index, org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section value) {
        if (sectionsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureSectionsIsMutable();
          sections_.set(index, value);
          onChanged();
        } else {
          sectionsBuilder_.setMessage(index, value);
        }
        return this;
      }
1544          /**
1545           * <code>repeated .hadoop.hdfs.fsimage.FileSummary.Section sections = 4;</code>
1546           */
1547          public Builder setSections(
1548              int index, org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section.Builder builderForValue) {
1549            if (sectionsBuilder_ == null) {
1550              ensureSectionsIsMutable();
1551              sections_.set(index, builderForValue.build());
1552              onChanged();
1553            } else {
1554              sectionsBuilder_.setMessage(index, builderForValue.build());
1555            }
1556            return this;
1557          }
1558          /**
1559           * <code>repeated .hadoop.hdfs.fsimage.FileSummary.Section sections = 4;</code>
1560           */
1561          public Builder addSections(org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section value) {
1562            if (sectionsBuilder_ == null) {
1563              if (value == null) {
1564                throw new NullPointerException();
1565              }
1566              ensureSectionsIsMutable();
1567              sections_.add(value);
1568              onChanged();
1569            } else {
1570              sectionsBuilder_.addMessage(value);
1571            }
1572            return this;
1573          }
1574          /**
1575           * <code>repeated .hadoop.hdfs.fsimage.FileSummary.Section sections = 4;</code>
1576           */
1577          public Builder addSections(
1578              int index, org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section value) {
1579            if (sectionsBuilder_ == null) {
1580              if (value == null) {
1581                throw new NullPointerException();
1582              }
1583              ensureSectionsIsMutable();
1584              sections_.add(index, value);
1585              onChanged();
1586            } else {
1587              sectionsBuilder_.addMessage(index, value);
1588            }
1589            return this;
1590          }
1591          /**
1592           * <code>repeated .hadoop.hdfs.fsimage.FileSummary.Section sections = 4;</code>
1593           */
1594          public Builder addSections(
1595              org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section.Builder builderForValue) {
1596            if (sectionsBuilder_ == null) {
1597              ensureSectionsIsMutable();
1598              sections_.add(builderForValue.build());
1599              onChanged();
1600            } else {
1601              sectionsBuilder_.addMessage(builderForValue.build());
1602            }
1603            return this;
1604          }
1605          /**
1606           * <code>repeated .hadoop.hdfs.fsimage.FileSummary.Section sections = 4;</code>
1607           */
1608          public Builder addSections(
1609              int index, org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section.Builder builderForValue) {
1610            if (sectionsBuilder_ == null) {
1611              ensureSectionsIsMutable();
1612              sections_.add(index, builderForValue.build());
1613              onChanged();
1614            } else {
1615              sectionsBuilder_.addMessage(index, builderForValue.build());
1616            }
1617            return this;
1618          }
1619          /**
1620           * <code>repeated .hadoop.hdfs.fsimage.FileSummary.Section sections = 4;</code>
1621           */
1622          public Builder addAllSections(
1623              java.lang.Iterable<? extends org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section> values) {
1624            if (sectionsBuilder_ == null) {
1625              ensureSectionsIsMutable();
1626              super.addAll(values, sections_);
1627              onChanged();
1628            } else {
1629              sectionsBuilder_.addAllMessages(values);
1630            }
1631            return this;
1632          }
1633          /**
1634           * <code>repeated .hadoop.hdfs.fsimage.FileSummary.Section sections = 4;</code>
1635           */
1636          public Builder clearSections() {
1637            if (sectionsBuilder_ == null) {
1638              sections_ = java.util.Collections.emptyList();
1639              bitField0_ = (bitField0_ & ~0x00000008);
1640              onChanged();
1641            } else {
1642              sectionsBuilder_.clear();
1643            }
1644            return this;
1645          }
1646          /**
1647           * <code>repeated .hadoop.hdfs.fsimage.FileSummary.Section sections = 4;</code>
1648           */
1649          public Builder removeSections(int index) {
1650            if (sectionsBuilder_ == null) {
1651              ensureSectionsIsMutable();
1652              sections_.remove(index);
1653              onChanged();
1654            } else {
1655              sectionsBuilder_.remove(index);
1656            }
1657            return this;
1658          }
1659          /**
1660           * <code>repeated .hadoop.hdfs.fsimage.FileSummary.Section sections = 4;</code>
1661           */
1662          public org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section.Builder getSectionsBuilder(
1663              int index) {
1664            return getSectionsFieldBuilder().getBuilder(index);
1665          }
1666          /**
1667           * <code>repeated .hadoop.hdfs.fsimage.FileSummary.Section sections = 4;</code>
1668           */
1669          public org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.SectionOrBuilder getSectionsOrBuilder(
1670              int index) {
1671            if (sectionsBuilder_ == null) {
1672              return sections_.get(index);  } else {
1673              return sectionsBuilder_.getMessageOrBuilder(index);
1674            }
1675          }
1676          /**
1677           * <code>repeated .hadoop.hdfs.fsimage.FileSummary.Section sections = 4;</code>
1678           */
1679          public java.util.List<? extends org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.SectionOrBuilder> 
1680               getSectionsOrBuilderList() {
1681            if (sectionsBuilder_ != null) {
1682              return sectionsBuilder_.getMessageOrBuilderList();
1683            } else {
1684              return java.util.Collections.unmodifiableList(sections_);
1685            }
1686          }
1687          /**
1688           * <code>repeated .hadoop.hdfs.fsimage.FileSummary.Section sections = 4;</code>
1689           */
1690          public org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section.Builder addSectionsBuilder() {
1691            return getSectionsFieldBuilder().addBuilder(
1692                org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section.getDefaultInstance());
1693          }
1694          /**
1695           * <code>repeated .hadoop.hdfs.fsimage.FileSummary.Section sections = 4;</code>
1696           */
1697          public org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section.Builder addSectionsBuilder(
1698              int index) {
1699            return getSectionsFieldBuilder().addBuilder(
1700                index, org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section.getDefaultInstance());
1701          }
1702          /**
1703           * <code>repeated .hadoop.hdfs.fsimage.FileSummary.Section sections = 4;</code>
1704           */
1705          public java.util.List<org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section.Builder> 
1706               getSectionsBuilderList() {
1707            return getSectionsFieldBuilder().getBuilderList();
1708          }
      // Lazily creates the RepeatedFieldBuilder on first use, seeding it with
      // the current list contents.  After creation, sections_ is nulled out:
      // the field builder becomes the single source of truth for this field,
      // which is why every accessor above checks sectionsBuilder_ first.
      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section, org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.SectionOrBuilder> 
          getSectionsFieldBuilder() {
        if (sectionsBuilder_ == null) {
          sectionsBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
              org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section, org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.SectionOrBuilder>(
                  sections_,
                  ((bitField0_ & 0x00000008) == 0x00000008),
                  getParentForChildren(),
                  isClean());
          sections_ = null;
        }
        return sectionsBuilder_;
      }
1723    
1724          // @@protoc_insertion_point(builder_scope:hadoop.hdfs.fsimage.FileSummary)
1725        }
1726    
    // Eagerly creates and initializes the singleton default instance
    // returned by FileSummary.getDefaultInstance().
    static {
      defaultInstance = new FileSummary(true);
      defaultInstance.initFields();
    }
1731    
1732        // @@protoc_insertion_point(class_scope:hadoop.hdfs.fsimage.FileSummary)
1733      }
1734    
  /**
   * Read interface shared by {@code NameSystemSection} and its Builder:
   * a presence check ({@code hasX}) and a getter for each optional field.
   */
  public interface NameSystemSectionOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // optional uint32 namespaceId = 1;
    /**
     * <code>optional uint32 namespaceId = 1;</code>
     */
    boolean hasNamespaceId();
    /**
     * <code>optional uint32 namespaceId = 1;</code>
     */
    int getNamespaceId();

    // optional uint64 genstampV1 = 2;
    /**
     * <code>optional uint64 genstampV1 = 2;</code>
     */
    boolean hasGenstampV1();
    /**
     * <code>optional uint64 genstampV1 = 2;</code>
     */
    long getGenstampV1();

    // optional uint64 genstampV2 = 3;
    /**
     * <code>optional uint64 genstampV2 = 3;</code>
     */
    boolean hasGenstampV2();
    /**
     * <code>optional uint64 genstampV2 = 3;</code>
     */
    long getGenstampV2();

    // optional uint64 genstampV1Limit = 4;
    /**
     * <code>optional uint64 genstampV1Limit = 4;</code>
     */
    boolean hasGenstampV1Limit();
    /**
     * <code>optional uint64 genstampV1Limit = 4;</code>
     */
    long getGenstampV1Limit();

    // optional uint64 lastAllocatedBlockId = 5;
    /**
     * <code>optional uint64 lastAllocatedBlockId = 5;</code>
     */
    boolean hasLastAllocatedBlockId();
    /**
     * <code>optional uint64 lastAllocatedBlockId = 5;</code>
     */
    long getLastAllocatedBlockId();

    // optional uint64 transactionId = 6;
    /**
     * <code>optional uint64 transactionId = 6;</code>
     */
    boolean hasTransactionId();
    /**
     * <code>optional uint64 transactionId = 6;</code>
     */
    long getTransactionId();

    // optional uint64 rollingUpgradeStartTime = 7;
    /**
     * <code>optional uint64 rollingUpgradeStartTime = 7;</code>
     */
    boolean hasRollingUpgradeStartTime();
    /**
     * <code>optional uint64 rollingUpgradeStartTime = 7;</code>
     */
    long getRollingUpgradeStartTime();
  }
1808      /**
1809       * Protobuf type {@code hadoop.hdfs.fsimage.NameSystemSection}
1810       *
1811       * <pre>
1812       **
1813       * Name: NS_INFO
1814       * </pre>
1815       */
1816      public static final class NameSystemSection extends
1817          com.google.protobuf.GeneratedMessage
1818          implements NameSystemSectionOrBuilder {
    // Use NameSystemSection.newBuilder() to construct.
    private NameSystemSection(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // No-init constructor used only to create the defaultInstance singleton.
    private NameSystemSection(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
1825    
    // Singleton default instance; assigned in the class static initializer.
    private static final NameSystemSection defaultInstance;
    public static NameSystemSection getDefaultInstance() {
      return defaultInstance;
    }

    public NameSystemSection getDefaultInstanceForType() {
      return defaultInstance;
    }

    // Fields the parser did not recognize, preserved for reserialization.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Parses a NameSystemSection from the wire format.  Each tag encodes
    // (fieldNumber << 3) | wireType; every field here is a varint (wire
    // type 0), hence the tags 8, 16, ..., 56.  The position of the default
    // label before the numbered cases does not affect switch dispatch.
    private NameSystemSection(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              // Tag 0 marks end of input.
              done = true;
              break;
            default: {
              // Unrecognized field: preserve it in unknownFields; stop if
              // the field terminates the message (end-group).
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 8: {
              bitField0_ |= 0x00000001;
              namespaceId_ = input.readUInt32();
              break;
            }
            case 16: {
              bitField0_ |= 0x00000002;
              genstampV1_ = input.readUInt64();
              break;
            }
            case 24: {
              bitField0_ |= 0x00000004;
              genstampV2_ = input.readUInt64();
              break;
            }
            case 32: {
              bitField0_ |= 0x00000008;
              genstampV1Limit_ = input.readUInt64();
              break;
            }
            case 40: {
              bitField0_ |= 0x00000010;
              lastAllocatedBlockId_ = input.readUInt64();
              break;
            }
            case 48: {
              bitField0_ |= 0x00000020;
              transactionId_ = input.readUInt64();
              break;
            }
            case 56: {
              bitField0_ |= 0x00000040;
              rollingUpgradeStartTime_ = input.readUInt64();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Always attach the collected unknown fields, even on a parse error,
        // so the partially-built message is self-consistent.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    // Descriptor plumbing: hooks this generated class up to the file-level
    // descriptor and field-accessor table built in FsImageProto.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_NameSystemSection_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_NameSystemSection_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hdfs.server.namenode.FsImageProto.NameSystemSection.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.NameSystemSection.Builder.class);
    }

    // NOTE(review): PARSER is a non-final public field in protobuf 2.5-era
    // generated code; do not rely on it being immutable.
    public static com.google.protobuf.Parser<NameSystemSection> PARSER =
        new com.google.protobuf.AbstractParser<NameSystemSection>() {
      public NameSystemSection parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new NameSystemSection(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<NameSystemSection> getParserForType() {
      return PARSER;
    }
1937    
    // Presence bits for the optional fields: bit (1 << (fieldNumber - 1))
    // is set when the corresponding field was explicitly parsed or set.
    private int bitField0_;
    // optional uint32 namespaceId = 1;
    public static final int NAMESPACEID_FIELD_NUMBER = 1;
    private int namespaceId_;
    /**
     * <code>optional uint32 namespaceId = 1;</code>
     */
    public boolean hasNamespaceId() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>optional uint32 namespaceId = 1;</code>
     */
    public int getNamespaceId() {
      return namespaceId_;
    }

    // optional uint64 genstampV1 = 2;
    public static final int GENSTAMPV1_FIELD_NUMBER = 2;
    private long genstampV1_;
    /**
     * <code>optional uint64 genstampV1 = 2;</code>
     */
    public boolean hasGenstampV1() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>optional uint64 genstampV1 = 2;</code>
     */
    public long getGenstampV1() {
      return genstampV1_;
    }

    // optional uint64 genstampV2 = 3;
    public static final int GENSTAMPV2_FIELD_NUMBER = 3;
    private long genstampV2_;
    /**
     * <code>optional uint64 genstampV2 = 3;</code>
     */
    public boolean hasGenstampV2() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    /**
     * <code>optional uint64 genstampV2 = 3;</code>
     */
    public long getGenstampV2() {
      return genstampV2_;
    }

    // optional uint64 genstampV1Limit = 4;
    public static final int GENSTAMPV1LIMIT_FIELD_NUMBER = 4;
    private long genstampV1Limit_;
    /**
     * <code>optional uint64 genstampV1Limit = 4;</code>
     */
    public boolean hasGenstampV1Limit() {
      return ((bitField0_ & 0x00000008) == 0x00000008);
    }
    /**
     * <code>optional uint64 genstampV1Limit = 4;</code>
     */
    public long getGenstampV1Limit() {
      return genstampV1Limit_;
    }

    // optional uint64 lastAllocatedBlockId = 5;
    public static final int LASTALLOCATEDBLOCKID_FIELD_NUMBER = 5;
    private long lastAllocatedBlockId_;
    /**
     * <code>optional uint64 lastAllocatedBlockId = 5;</code>
     */
    public boolean hasLastAllocatedBlockId() {
      return ((bitField0_ & 0x00000010) == 0x00000010);
    }
    /**
     * <code>optional uint64 lastAllocatedBlockId = 5;</code>
     */
    public long getLastAllocatedBlockId() {
      return lastAllocatedBlockId_;
    }

    // optional uint64 transactionId = 6;
    public static final int TRANSACTIONID_FIELD_NUMBER = 6;
    private long transactionId_;
    /**
     * <code>optional uint64 transactionId = 6;</code>
     */
    public boolean hasTransactionId() {
      return ((bitField0_ & 0x00000020) == 0x00000020);
    }
    /**
     * <code>optional uint64 transactionId = 6;</code>
     */
    public long getTransactionId() {
      return transactionId_;
    }

    // optional uint64 rollingUpgradeStartTime = 7;
    public static final int ROLLINGUPGRADESTARTTIME_FIELD_NUMBER = 7;
    private long rollingUpgradeStartTime_;
    /**
     * <code>optional uint64 rollingUpgradeStartTime = 7;</code>
     */
    public boolean hasRollingUpgradeStartTime() {
      return ((bitField0_ & 0x00000040) == 0x00000040);
    }
    /**
     * <code>optional uint64 rollingUpgradeStartTime = 7;</code>
     */
    public long getRollingUpgradeStartTime() {
      return rollingUpgradeStartTime_;
    }
2050    
    // Resets every field to its proto default; called before parsing.
    private void initFields() {
      namespaceId_ = 0;
      genstampV1_ = 0L;
      genstampV2_ = 0L;
      genstampV1Limit_ = 0L;
      lastAllocatedBlockId_ = 0L;
      transactionId_ = 0L;
      rollingUpgradeStartTime_ = 0L;
    }
    // Memoized result of isInitialized(): -1 unknown, 0 false, 1 true.
    private byte memoizedIsInitialized = -1;
    // Every field of this message is optional, so any instance is
    // always initialized.
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      memoizedIsInitialized = 1;
      return true;
    }
2068    
    // Serializes only the fields whose presence bit is set, in ascending
    // field-number order, followed by any preserved unknown fields.
    // getSerializedSize() is called first for its memoization side effect.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeUInt32(1, namespaceId_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeUInt64(2, genstampV1_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeUInt64(3, genstampV2_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        output.writeUInt64(4, genstampV1Limit_);
      }
      if (((bitField0_ & 0x00000010) == 0x00000010)) {
        output.writeUInt64(5, lastAllocatedBlockId_);
      }
      if (((bitField0_ & 0x00000020) == 0x00000020)) {
        output.writeUInt64(6, transactionId_);
      }
      if (((bitField0_ & 0x00000040) == 0x00000040)) {
        output.writeUInt64(7, rollingUpgradeStartTime_);
      }
      getUnknownFields().writeTo(output);
    }
2095    
    // Memoized wire size (-1 until computed); safe because the message is
    // immutable once built.  Sums the tag+varint size of each present field
    // plus the size of any unknown fields.
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt32Size(1, namespaceId_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(2, genstampV1_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(3, genstampV2_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(4, genstampV1Limit_);
      }
      if (((bitField0_ & 0x00000010) == 0x00000010)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(5, lastAllocatedBlockId_);
      }
      if (((bitField0_ & 0x00000020) == 0x00000020)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(6, transactionId_);
      }
      if (((bitField0_ & 0x00000040) == 0x00000040)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(7, rollingUpgradeStartTime_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
2134    
    private static final long serialVersionUID = 0L;
    // Java serialization is delegated to GeneratedMessage.writeReplace(),
    // which substitutes a proto-encoded representation.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
2141    
    // Static parse entry points for every supported input form; all of them
    // delegate to PARSER, which invokes the wire-format constructor above.
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.NameSystemSection parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.NameSystemSection parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.NameSystemSection parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.NameSystemSection parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.NameSystemSection parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.NameSystemSection parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    // Delimited variants read a leading varint length before the message.
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.NameSystemSection parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.NameSystemSection parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.NameSystemSection parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.NameSystemSection parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
2194    
    // Builder factory methods.  toBuilder()/newBuilder(prototype) seed the
    // new builder with a copy of an existing message's fields.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hdfs.server.namenode.FsImageProto.NameSystemSection prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    // Framework hook: creates a builder parented to an enclosing builder so
    // that nested changes propagate invalidation upward.
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
2208        /**
2209         * Protobuf type {@code hadoop.hdfs.fsimage.NameSystemSection}
2210         *
2211         * <pre>
2212         **
2213         * Name: NS_INFO
2214         * </pre>
2215         */
2216        public static final class Builder extends
2217            com.google.protobuf.GeneratedMessage.Builder<Builder>
2218           implements org.apache.hadoop.hdfs.server.namenode.FsImageProto.NameSystemSectionOrBuilder {
      // Descriptor plumbing mirroring the enclosing message class: both the
      // message and its Builder resolve to the same descriptor and
      // field-accessor table.
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_NameSystemSection_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_NameSystemSection_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hdfs.server.namenode.FsImageProto.NameSystemSection.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.NameSystemSection.Builder.class);
      }
2230    
      // Construct using org.apache.hadoop.hdfs.server.namenode.FsImageProto.NameSystemSection.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // Effectively a no-op: this message has no nested-message fields, so
      // there are no field builders to create even when alwaysUseFieldBuilders.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }
2248    
2249          public Builder clear() {
2250            super.clear();
2251            namespaceId_ = 0;
2252            bitField0_ = (bitField0_ & ~0x00000001);
2253            genstampV1_ = 0L;
2254            bitField0_ = (bitField0_ & ~0x00000002);
2255            genstampV2_ = 0L;
2256            bitField0_ = (bitField0_ & ~0x00000004);
2257            genstampV1Limit_ = 0L;
2258            bitField0_ = (bitField0_ & ~0x00000008);
2259            lastAllocatedBlockId_ = 0L;
2260            bitField0_ = (bitField0_ & ~0x00000010);
2261            transactionId_ = 0L;
2262            bitField0_ = (bitField0_ & ~0x00000020);
2263            rollingUpgradeStartTime_ = 0L;
2264            bitField0_ = (bitField0_ & ~0x00000040);
2265            return this;
2266          }
2267    
2268          public Builder clone() {
2269            return create().mergeFrom(buildPartial());
2270          }
2271    
2272          public com.google.protobuf.Descriptors.Descriptor
2273              getDescriptorForType() {
2274            return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_NameSystemSection_descriptor;
2275          }
2276    
2277          public org.apache.hadoop.hdfs.server.namenode.FsImageProto.NameSystemSection getDefaultInstanceForType() {
2278            return org.apache.hadoop.hdfs.server.namenode.FsImageProto.NameSystemSection.getDefaultInstance();
2279          }
2280    
2281          public org.apache.hadoop.hdfs.server.namenode.FsImageProto.NameSystemSection build() {
2282            org.apache.hadoop.hdfs.server.namenode.FsImageProto.NameSystemSection result = buildPartial();
2283            if (!result.isInitialized()) {
2284              throw newUninitializedMessageException(result);
2285            }
2286            return result;
2287          }
2288    
2289          public org.apache.hadoop.hdfs.server.namenode.FsImageProto.NameSystemSection buildPartial() {
2290            org.apache.hadoop.hdfs.server.namenode.FsImageProto.NameSystemSection result = new org.apache.hadoop.hdfs.server.namenode.FsImageProto.NameSystemSection(this);
2291            int from_bitField0_ = bitField0_;
2292            int to_bitField0_ = 0;
2293            if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
2294              to_bitField0_ |= 0x00000001;
2295            }
2296            result.namespaceId_ = namespaceId_;
2297            if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
2298              to_bitField0_ |= 0x00000002;
2299            }
2300            result.genstampV1_ = genstampV1_;
2301            if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
2302              to_bitField0_ |= 0x00000004;
2303            }
2304            result.genstampV2_ = genstampV2_;
2305            if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
2306              to_bitField0_ |= 0x00000008;
2307            }
2308            result.genstampV1Limit_ = genstampV1Limit_;
2309            if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
2310              to_bitField0_ |= 0x00000010;
2311            }
2312            result.lastAllocatedBlockId_ = lastAllocatedBlockId_;
2313            if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
2314              to_bitField0_ |= 0x00000020;
2315            }
2316            result.transactionId_ = transactionId_;
2317            if (((from_bitField0_ & 0x00000040) == 0x00000040)) {
2318              to_bitField0_ |= 0x00000040;
2319            }
2320            result.rollingUpgradeStartTime_ = rollingUpgradeStartTime_;
2321            result.bitField0_ = to_bitField0_;
2322            onBuilt();
2323            return result;
2324          }
2325    
2326          public Builder mergeFrom(com.google.protobuf.Message other) {
2327            if (other instanceof org.apache.hadoop.hdfs.server.namenode.FsImageProto.NameSystemSection) {
2328              return mergeFrom((org.apache.hadoop.hdfs.server.namenode.FsImageProto.NameSystemSection)other);
2329            } else {
2330              super.mergeFrom(other);
2331              return this;
2332            }
2333          }
2334    
2335          public Builder mergeFrom(org.apache.hadoop.hdfs.server.namenode.FsImageProto.NameSystemSection other) {
2336            if (other == org.apache.hadoop.hdfs.server.namenode.FsImageProto.NameSystemSection.getDefaultInstance()) return this;
2337            if (other.hasNamespaceId()) {
2338              setNamespaceId(other.getNamespaceId());
2339            }
2340            if (other.hasGenstampV1()) {
2341              setGenstampV1(other.getGenstampV1());
2342            }
2343            if (other.hasGenstampV2()) {
2344              setGenstampV2(other.getGenstampV2());
2345            }
2346            if (other.hasGenstampV1Limit()) {
2347              setGenstampV1Limit(other.getGenstampV1Limit());
2348            }
2349            if (other.hasLastAllocatedBlockId()) {
2350              setLastAllocatedBlockId(other.getLastAllocatedBlockId());
2351            }
2352            if (other.hasTransactionId()) {
2353              setTransactionId(other.getTransactionId());
2354            }
2355            if (other.hasRollingUpgradeStartTime()) {
2356              setRollingUpgradeStartTime(other.getRollingUpgradeStartTime());
2357            }
2358            this.mergeUnknownFields(other.getUnknownFields());
2359            return this;
2360          }
2361    
2362          public final boolean isInitialized() {
2363            return true;
2364          }
2365    
2366          public Builder mergeFrom(
2367              com.google.protobuf.CodedInputStream input,
2368              com.google.protobuf.ExtensionRegistryLite extensionRegistry)
2369              throws java.io.IOException {
2370            org.apache.hadoop.hdfs.server.namenode.FsImageProto.NameSystemSection parsedMessage = null;
2371            try {
2372              parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
2373            } catch (com.google.protobuf.InvalidProtocolBufferException e) {
2374              parsedMessage = (org.apache.hadoop.hdfs.server.namenode.FsImageProto.NameSystemSection) e.getUnfinishedMessage();
2375              throw e;
2376            } finally {
2377              if (parsedMessage != null) {
2378                mergeFrom(parsedMessage);
2379              }
2380            }
2381            return this;
2382          }
2383          private int bitField0_;
2384    
2385          // optional uint32 namespaceId = 1;
2386          private int namespaceId_ ;
2387          /**
2388           * <code>optional uint32 namespaceId = 1;</code>
2389           */
2390          public boolean hasNamespaceId() {
2391            return ((bitField0_ & 0x00000001) == 0x00000001);
2392          }
2393          /**
2394           * <code>optional uint32 namespaceId = 1;</code>
2395           */
2396          public int getNamespaceId() {
2397            return namespaceId_;
2398          }
2399          /**
2400           * <code>optional uint32 namespaceId = 1;</code>
2401           */
2402          public Builder setNamespaceId(int value) {
2403            bitField0_ |= 0x00000001;
2404            namespaceId_ = value;
2405            onChanged();
2406            return this;
2407          }
2408          /**
2409           * <code>optional uint32 namespaceId = 1;</code>
2410           */
2411          public Builder clearNamespaceId() {
2412            bitField0_ = (bitField0_ & ~0x00000001);
2413            namespaceId_ = 0;
2414            onChanged();
2415            return this;
2416          }
2417    
2418          // optional uint64 genstampV1 = 2;
2419          private long genstampV1_ ;
2420          /**
2421           * <code>optional uint64 genstampV1 = 2;</code>
2422           */
2423          public boolean hasGenstampV1() {
2424            return ((bitField0_ & 0x00000002) == 0x00000002);
2425          }
2426          /**
2427           * <code>optional uint64 genstampV1 = 2;</code>
2428           */
2429          public long getGenstampV1() {
2430            return genstampV1_;
2431          }
2432          /**
2433           * <code>optional uint64 genstampV1 = 2;</code>
2434           */
2435          public Builder setGenstampV1(long value) {
2436            bitField0_ |= 0x00000002;
2437            genstampV1_ = value;
2438            onChanged();
2439            return this;
2440          }
2441          /**
2442           * <code>optional uint64 genstampV1 = 2;</code>
2443           */
2444          public Builder clearGenstampV1() {
2445            bitField0_ = (bitField0_ & ~0x00000002);
2446            genstampV1_ = 0L;
2447            onChanged();
2448            return this;
2449          }
2450    
2451          // optional uint64 genstampV2 = 3;
2452          private long genstampV2_ ;
2453          /**
2454           * <code>optional uint64 genstampV2 = 3;</code>
2455           */
2456          public boolean hasGenstampV2() {
2457            return ((bitField0_ & 0x00000004) == 0x00000004);
2458          }
2459          /**
2460           * <code>optional uint64 genstampV2 = 3;</code>
2461           */
2462          public long getGenstampV2() {
2463            return genstampV2_;
2464          }
2465          /**
2466           * <code>optional uint64 genstampV2 = 3;</code>
2467           */
2468          public Builder setGenstampV2(long value) {
2469            bitField0_ |= 0x00000004;
2470            genstampV2_ = value;
2471            onChanged();
2472            return this;
2473          }
2474          /**
2475           * <code>optional uint64 genstampV2 = 3;</code>
2476           */
2477          public Builder clearGenstampV2() {
2478            bitField0_ = (bitField0_ & ~0x00000004);
2479            genstampV2_ = 0L;
2480            onChanged();
2481            return this;
2482          }
2483    
2484          // optional uint64 genstampV1Limit = 4;
2485          private long genstampV1Limit_ ;
2486          /**
2487           * <code>optional uint64 genstampV1Limit = 4;</code>
2488           */
2489          public boolean hasGenstampV1Limit() {
2490            return ((bitField0_ & 0x00000008) == 0x00000008);
2491          }
2492          /**
2493           * <code>optional uint64 genstampV1Limit = 4;</code>
2494           */
2495          public long getGenstampV1Limit() {
2496            return genstampV1Limit_;
2497          }
2498          /**
2499           * <code>optional uint64 genstampV1Limit = 4;</code>
2500           */
2501          public Builder setGenstampV1Limit(long value) {
2502            bitField0_ |= 0x00000008;
2503            genstampV1Limit_ = value;
2504            onChanged();
2505            return this;
2506          }
2507          /**
2508           * <code>optional uint64 genstampV1Limit = 4;</code>
2509           */
2510          public Builder clearGenstampV1Limit() {
2511            bitField0_ = (bitField0_ & ~0x00000008);
2512            genstampV1Limit_ = 0L;
2513            onChanged();
2514            return this;
2515          }
2516    
2517          // optional uint64 lastAllocatedBlockId = 5;
2518          private long lastAllocatedBlockId_ ;
2519          /**
2520           * <code>optional uint64 lastAllocatedBlockId = 5;</code>
2521           */
2522          public boolean hasLastAllocatedBlockId() {
2523            return ((bitField0_ & 0x00000010) == 0x00000010);
2524          }
2525          /**
2526           * <code>optional uint64 lastAllocatedBlockId = 5;</code>
2527           */
2528          public long getLastAllocatedBlockId() {
2529            return lastAllocatedBlockId_;
2530          }
2531          /**
2532           * <code>optional uint64 lastAllocatedBlockId = 5;</code>
2533           */
2534          public Builder setLastAllocatedBlockId(long value) {
2535            bitField0_ |= 0x00000010;
2536            lastAllocatedBlockId_ = value;
2537            onChanged();
2538            return this;
2539          }
2540          /**
2541           * <code>optional uint64 lastAllocatedBlockId = 5;</code>
2542           */
2543          public Builder clearLastAllocatedBlockId() {
2544            bitField0_ = (bitField0_ & ~0x00000010);
2545            lastAllocatedBlockId_ = 0L;
2546            onChanged();
2547            return this;
2548          }
2549    
2550          // optional uint64 transactionId = 6;
2551          private long transactionId_ ;
2552          /**
2553           * <code>optional uint64 transactionId = 6;</code>
2554           */
2555          public boolean hasTransactionId() {
2556            return ((bitField0_ & 0x00000020) == 0x00000020);
2557          }
2558          /**
2559           * <code>optional uint64 transactionId = 6;</code>
2560           */
2561          public long getTransactionId() {
2562            return transactionId_;
2563          }
2564          /**
2565           * <code>optional uint64 transactionId = 6;</code>
2566           */
2567          public Builder setTransactionId(long value) {
2568            bitField0_ |= 0x00000020;
2569            transactionId_ = value;
2570            onChanged();
2571            return this;
2572          }
2573          /**
2574           * <code>optional uint64 transactionId = 6;</code>
2575           */
2576          public Builder clearTransactionId() {
2577            bitField0_ = (bitField0_ & ~0x00000020);
2578            transactionId_ = 0L;
2579            onChanged();
2580            return this;
2581          }
2582    
2583          // optional uint64 rollingUpgradeStartTime = 7;
2584          private long rollingUpgradeStartTime_ ;
2585          /**
2586           * <code>optional uint64 rollingUpgradeStartTime = 7;</code>
2587           */
2588          public boolean hasRollingUpgradeStartTime() {
2589            return ((bitField0_ & 0x00000040) == 0x00000040);
2590          }
2591          /**
2592           * <code>optional uint64 rollingUpgradeStartTime = 7;</code>
2593           */
2594          public long getRollingUpgradeStartTime() {
2595            return rollingUpgradeStartTime_;
2596          }
2597          /**
2598           * <code>optional uint64 rollingUpgradeStartTime = 7;</code>
2599           */
2600          public Builder setRollingUpgradeStartTime(long value) {
2601            bitField0_ |= 0x00000040;
2602            rollingUpgradeStartTime_ = value;
2603            onChanged();
2604            return this;
2605          }
2606          /**
2607           * <code>optional uint64 rollingUpgradeStartTime = 7;</code>
2608           */
2609          public Builder clearRollingUpgradeStartTime() {
2610            bitField0_ = (bitField0_ & ~0x00000040);
2611            rollingUpgradeStartTime_ = 0L;
2612            onChanged();
2613            return this;
2614          }
2615    
2616          // @@protoc_insertion_point(builder_scope:hadoop.hdfs.fsimage.NameSystemSection)
2617        }
2618    
    static {
      // Eagerly create the shared default instance and set its fields to
      // their proto defaults.
      defaultInstance = new NameSystemSection(true);
      defaultInstance.initFields();
    }
2623    
2624        // @@protoc_insertion_point(class_scope:hadoop.hdfs.fsimage.NameSystemSection)
2625      }
2626    
  /**
   * Read-only accessor interface implemented by both {@code INodeSection}
   * and its builder: presence checks plus getters for each field.
   */
  public interface INodeSectionOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // optional uint64 lastInodeId = 1;
    /**
     * <code>optional uint64 lastInodeId = 1;</code>
     */
    boolean hasLastInodeId();
    /**
     * <code>optional uint64 lastInodeId = 1;</code>
     */
    long getLastInodeId();

    // optional uint64 numInodes = 2;
    /**
     * <code>optional uint64 numInodes = 2;</code>
     *
     * <pre>
     * repeated INodes..
     * </pre>
     */
    boolean hasNumInodes();
    /**
     * <code>optional uint64 numInodes = 2;</code>
     *
     * <pre>
     * repeated INodes..
     * </pre>
     */
    long getNumInodes();
  }
2658      /**
2659       * Protobuf type {@code hadoop.hdfs.fsimage.INodeSection}
2660       *
2661       * <pre>
2662       **
2663       * Permission is serialized as a 64-bit long. [0:24):[25:48):[48:64) (in Big Endian).
2664       * The first and the second parts are the string ids of the user and
2665       * group name, and the last 16 bits are the permission bits.
2666       *
2667       * Name: INODE
2668       * </pre>
2669       */
2670      public static final class INodeSection extends
2671          com.google.protobuf.GeneratedMessage
2672          implements INodeSectionOrBuilder {
    // Use INodeSection.newBuilder() to construct.
    private INodeSection(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Constructor for the shared default instance; fields are populated separately via initFields().
    private INodeSection(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    // Shared immutable default instance; assigned in this class's static
    // initializer (outside this excerpt).
    private static final INodeSection defaultInstance;
    public static INodeSection getDefaultInstance() {
      return defaultInstance;
    }

    public INodeSection getDefaultInstanceForType() {
      return defaultInstance;
    }

    // Fields not recognized during parsing are preserved here and re-emitted on write.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    /**
     * Parses an INodeSection from the wire.  Recognized fields are stored
     * directly on this instance; anything else is preserved in
     * {@code unknownFields}.  On malformed input the partially-read message
     * is attached to the thrown exception via {@code setUnfinishedMessage}.
     */
    private INodeSection(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              // Tag 0 marks end of input.
              done = true;
              break;
            default: {
              // NOTE: a `default` arm before later `case` labels is legal in
              // Java; dispatch is by label, not position.
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 8: {
              // Field 1 (lastInodeId), wire type 0 (varint).
              bitField0_ |= 0x00000001;
              lastInodeId_ = input.readUInt64();
              break;
            }
            case 16: {
              // Field 2 (numInodes), wire type 0 (varint).
              bitField0_ |= 0x00000002;
              numInodes_ = input.readUInt64();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Always freeze whatever was read, even on error paths.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.Builder.class);
    }

    // Parser singleton; delegates to the stream-parsing constructor above.
    public static com.google.protobuf.Parser<INodeSection> PARSER =
        new com.google.protobuf.AbstractParser<INodeSection>() {
      public INodeSection parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new INodeSection(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<INodeSection> getParserForType() {
      return PARSER;
    }
2766    
    /**
     * Read-only accessor interface implemented by both
     * {@code FileUnderConstructionFeature} and its builder.
     */
    public interface FileUnderConstructionFeatureOrBuilder
        extends com.google.protobuf.MessageOrBuilder {

      // optional string clientName = 1;
      /**
       * <code>optional string clientName = 1;</code>
       */
      boolean hasClientName();
      /**
       * <code>optional string clientName = 1;</code>
       */
      java.lang.String getClientName();
      /**
       * <code>optional string clientName = 1;</code>
       */
      com.google.protobuf.ByteString
          getClientNameBytes();

      // optional string clientMachine = 2;
      /**
       * <code>optional string clientMachine = 2;</code>
       */
      boolean hasClientMachine();
      /**
       * <code>optional string clientMachine = 2;</code>
       */
      java.lang.String getClientMachine();
      /**
       * <code>optional string clientMachine = 2;</code>
       */
      com.google.protobuf.ByteString
          getClientMachineBytes();
    }
2800        /**
2801         * Protobuf type {@code hadoop.hdfs.fsimage.INodeSection.FileUnderConstructionFeature}
2802         *
2803         * <pre>
2804         **
2805         * under-construction feature for INodeFile
2806         * </pre>
2807         */
2808        public static final class FileUnderConstructionFeature extends
2809            com.google.protobuf.GeneratedMessage
2810            implements FileUnderConstructionFeatureOrBuilder {
        // Use FileUnderConstructionFeature.newBuilder() to construct.
        private FileUnderConstructionFeature(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
          super(builder);
          this.unknownFields = builder.getUnknownFields();
        }
        // Constructor for the shared default instance; fields are populated separately via initFields().
        private FileUnderConstructionFeature(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

        // Shared immutable default instance; assigned in this class's static
        // initializer (outside this excerpt).
        private static final FileUnderConstructionFeature defaultInstance;
        public static FileUnderConstructionFeature getDefaultInstance() {
          return defaultInstance;
        }

        public FileUnderConstructionFeature getDefaultInstanceForType() {
          return defaultInstance;
        }

        // Fields not recognized during parsing are preserved here and re-emitted on write.
        private final com.google.protobuf.UnknownFieldSet unknownFields;
        @java.lang.Override
        public final com.google.protobuf.UnknownFieldSet
            getUnknownFields() {
          return this.unknownFields;
        }
        /**
         * Parses a FileUnderConstructionFeature from the wire.  Recognized
         * fields are stored directly; anything else is preserved in
         * {@code unknownFields}.  On malformed input the partially-read
         * message is attached to the thrown exception.
         */
        private FileUnderConstructionFeature(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          initFields();
          int mutable_bitField0_ = 0;
          com.google.protobuf.UnknownFieldSet.Builder unknownFields =
              com.google.protobuf.UnknownFieldSet.newBuilder();
          try {
            boolean done = false;
            while (!done) {
              int tag = input.readTag();
              switch (tag) {
                case 0:
                  // Tag 0 marks end of input.
                  done = true;
                  break;
                default: {
                  // NOTE: a `default` arm before later `case` labels is legal
                  // in Java; dispatch is by label, not position.
                  if (!parseUnknownField(input, unknownFields,
                                         extensionRegistry, tag)) {
                    done = true;
                  }
                  break;
                }
                case 10: {
                  // Field 1 (clientName), wire type 2 (length-delimited);
                  // kept as raw bytes, decoded lazily by getClientName().
                  bitField0_ |= 0x00000001;
                  clientName_ = input.readBytes();
                  break;
                }
                case 18: {
                  // Field 2 (clientMachine), wire type 2 (length-delimited).
                  bitField0_ |= 0x00000002;
                  clientMachine_ = input.readBytes();
                  break;
                }
              }
            }
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(this);
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(
                e.getMessage()).setUnfinishedMessage(this);
          } finally {
            // Always freeze whatever was read, even on error paths.
            this.unknownFields = unknownFields.build();
            makeExtensionsImmutable();
          }
        }
        public static final com.google.protobuf.Descriptors.Descriptor
            getDescriptor() {
          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_FileUnderConstructionFeature_descriptor;
        }

        protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
            internalGetFieldAccessorTable() {
          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_FileUnderConstructionFeature_fieldAccessorTable
              .ensureFieldAccessorsInitialized(
                  org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature.Builder.class);
        }

        // Parser singleton; delegates to the stream-parsing constructor above.
        public static com.google.protobuf.Parser<FileUnderConstructionFeature> PARSER =
            new com.google.protobuf.AbstractParser<FileUnderConstructionFeature>() {
          public FileUnderConstructionFeature parsePartialFrom(
              com.google.protobuf.CodedInputStream input,
              com.google.protobuf.ExtensionRegistryLite extensionRegistry)
              throws com.google.protobuf.InvalidProtocolBufferException {
            return new FileUnderConstructionFeature(input, extensionRegistry);
          }
        };

        @java.lang.Override
        public com.google.protobuf.Parser<FileUnderConstructionFeature> getParserForType() {
          return PARSER;
        }
2904    
        // Presence bits: 0x1 = clientName, 0x2 = clientMachine.
        private int bitField0_;
        // optional string clientName = 1;
        public static final int CLIENTNAME_FIELD_NUMBER = 1;
        // Holds either a String (already decoded) or a ByteString (raw wire bytes).
        private java.lang.Object clientName_;
        /**
         * <code>optional string clientName = 1;</code>
         */
        public boolean hasClientName() {
          return ((bitField0_ & 0x00000001) == 0x00000001);
        }
        /**
         * <code>optional string clientName = 1;</code>
         */
        public java.lang.String getClientName() {
          java.lang.Object ref = clientName_;
          if (ref instanceof java.lang.String) {
            return (java.lang.String) ref;
          } else {
            com.google.protobuf.ByteString bs = 
                (com.google.protobuf.ByteString) ref;
            java.lang.String s = bs.toStringUtf8();
            // Cache the decoded String only when the bytes are valid UTF-8,
            // so the original bytes survive a lossy decode.
            if (bs.isValidUtf8()) {
              clientName_ = s;
            }
            return s;
          }
        }
        /**
         * <code>optional string clientName = 1;</code>
         */
        public com.google.protobuf.ByteString
            getClientNameBytes() {
          java.lang.Object ref = clientName_;
          if (ref instanceof java.lang.String) {
            // Re-encode and cache the bytes form for subsequent calls.
            com.google.protobuf.ByteString b = 
                com.google.protobuf.ByteString.copyFromUtf8(
                    (java.lang.String) ref);
            clientName_ = b;
            return b;
          } else {
            return (com.google.protobuf.ByteString) ref;
          }
        }
2948    
        // optional string clientMachine = 2;
        public static final int CLIENTMACHINE_FIELD_NUMBER = 2;
        // Holds either a String (already decoded) or a ByteString (raw wire bytes).
        private java.lang.Object clientMachine_;
        /**
         * <code>optional string clientMachine = 2;</code>
         */
        public boolean hasClientMachine() {
          return ((bitField0_ & 0x00000002) == 0x00000002);
        }
        /**
         * <code>optional string clientMachine = 2;</code>
         */
        public java.lang.String getClientMachine() {
          java.lang.Object ref = clientMachine_;
          if (ref instanceof java.lang.String) {
            return (java.lang.String) ref;
          } else {
            com.google.protobuf.ByteString bs = 
                (com.google.protobuf.ByteString) ref;
            java.lang.String s = bs.toStringUtf8();
            // Cache the decoded String only when the bytes are valid UTF-8.
            if (bs.isValidUtf8()) {
              clientMachine_ = s;
            }
            return s;
          }
        }
        /**
         * <code>optional string clientMachine = 2;</code>
         */
        public com.google.protobuf.ByteString
            getClientMachineBytes() {
          java.lang.Object ref = clientMachine_;
          if (ref instanceof java.lang.String) {
            // Re-encode and cache the bytes form for subsequent calls.
            com.google.protobuf.ByteString b = 
                com.google.protobuf.ByteString.copyFromUtf8(
                    (java.lang.String) ref);
            clientMachine_ = b;
            return b;
          } else {
            return (com.google.protobuf.ByteString) ref;
          }
        }
2991    
        /** Sets both string fields to their default, the empty string. */
        private void initFields() {
          clientName_ = "";
          clientMachine_ = "";
        }
        // Memoized initialization check: -1 = not yet computed, 1 = initialized.
        private byte memoizedIsInitialized = -1;
        /** Always true: both fields of this message are optional. */
        public final boolean isInitialized() {
          byte isInitialized = memoizedIsInitialized;
          if (isInitialized != -1) return isInitialized == 1;

          memoizedIsInitialized = 1;
          return true;
        }
3004    
      /**
       * Serializes this message to {@code output} in protobuf wire format.
       * Only fields whose has-bit is set are written; unknown fields
       * captured at parse time are appended at the end.
       */
      public void writeTo(com.google.protobuf.CodedOutputStream output)
                          throws java.io.IOException {
        // Called for its memoization side effect (generated-code convention).
        getSerializedSize();
        // Field 1: clientName (has-bit 0x1).
        if (((bitField0_ & 0x00000001) == 0x00000001)) {
          output.writeBytes(1, getClientNameBytes());
        }
        // Field 2: clientMachine (has-bit 0x2).
        if (((bitField0_ & 0x00000002) == 0x00000002)) {
          output.writeBytes(2, getClientMachineBytes());
        }
        getUnknownFields().writeTo(output);
      }
3016    
      // Cached serialized size; -1 means "not yet computed".
      private int memoizedSerializedSize = -1;
      /**
       * Returns the serialized size of this message in bytes, computing it
       * once and memoizing the result (safe because the message is
       * immutable after construction).
       */
      public int getSerializedSize() {
        int size = memoizedSerializedSize;
        if (size != -1) return size;

        size = 0;
        // Only set fields contribute to the size, mirroring writeTo().
        if (((bitField0_ & 0x00000001) == 0x00000001)) {
          size += com.google.protobuf.CodedOutputStream
            .computeBytesSize(1, getClientNameBytes());
        }
        if (((bitField0_ & 0x00000002) == 0x00000002)) {
          size += com.google.protobuf.CodedOutputStream
            .computeBytesSize(2, getClientMachineBytes());
        }
        size += getUnknownFields().getSerializedSize();
        memoizedSerializedSize = size;
        return size;
      }
3035    
      private static final long serialVersionUID = 0L;
      /**
       * Java-serialization hook; delegates to GeneratedMessage's
       * writeReplace so messages serialize via their protobuf form.
       */
      @java.lang.Override
      protected java.lang.Object writeReplace()
          throws java.io.ObjectStreamException {
        return super.writeReplace();
      }
3042    
      // ---- Static parse entry points ----
      // All overloads delegate to PARSER; the CodedInputStream/InputStream
      // variants throw IOException, the in-memory variants throw
      // InvalidProtocolBufferException only.
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature parseFrom(
          com.google.protobuf.ByteString data)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature parseFrom(
          com.google.protobuf.ByteString data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature parseFrom(byte[] data)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature parseFrom(
          byte[] data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature parseFrom(java.io.InputStream input)
          throws java.io.IOException {
        return PARSER.parseFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature parseFrom(
          java.io.InputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseFrom(input, extensionRegistry);
      }
      // Delimited variants read a varint length prefix before the message.
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature parseDelimitedFrom(java.io.InputStream input)
          throws java.io.IOException {
        return PARSER.parseDelimitedFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature parseDelimitedFrom(
          java.io.InputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseDelimitedFrom(input, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature parseFrom(
          com.google.protobuf.CodedInputStream input)
          throws java.io.IOException {
        return PARSER.parseFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature parseFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseFrom(input, extensionRegistry);
      }
3095    
3096          public static Builder newBuilder() { return Builder.create(); }
3097          public Builder newBuilderForType() { return newBuilder(); }
3098          public static Builder newBuilder(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature prototype) {
3099            return newBuilder().mergeFrom(prototype);
3100          }
3101          public Builder toBuilder() { return newBuilder(this); }
3102    
3103          @java.lang.Override
3104          protected Builder newBuilderForType(
3105              com.google.protobuf.GeneratedMessage.BuilderParent parent) {
3106            Builder builder = new Builder(parent);
3107            return builder;
3108          }
3109          /**
3110           * Protobuf type {@code hadoop.hdfs.fsimage.INodeSection.FileUnderConstructionFeature}
3111           *
3112           * <pre>
3113           **
3114           * under-construction feature for INodeFile
3115           * </pre>
3116           */
3117          public static final class Builder extends
3118              com.google.protobuf.GeneratedMessage.Builder<Builder>
3119             implements org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeatureOrBuilder {
3120            public static final com.google.protobuf.Descriptors.Descriptor
3121                getDescriptor() {
3122              return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_FileUnderConstructionFeature_descriptor;
3123            }
3124    
3125            protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
3126                internalGetFieldAccessorTable() {
3127              return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_FileUnderConstructionFeature_fieldAccessorTable
3128                  .ensureFieldAccessorsInitialized(
3129                      org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature.Builder.class);
3130            }
3131    
3132            // Construct using org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature.newBuilder()
3133            private Builder() {
3134              maybeForceBuilderInitialization();
3135            }
3136    
3137            private Builder(
3138                com.google.protobuf.GeneratedMessage.BuilderParent parent) {
3139              super(parent);
3140              maybeForceBuilderInitialization();
3141            }
3142            private void maybeForceBuilderInitialization() {
3143              if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
3144              }
3145            }
3146            private static Builder create() {
3147              return new Builder();
3148            }
3149    
3150            public Builder clear() {
3151              super.clear();
3152              clientName_ = "";
3153              bitField0_ = (bitField0_ & ~0x00000001);
3154              clientMachine_ = "";
3155              bitField0_ = (bitField0_ & ~0x00000002);
3156              return this;
3157            }
3158    
3159            public Builder clone() {
3160              return create().mergeFrom(buildPartial());
3161            }
3162    
3163            public com.google.protobuf.Descriptors.Descriptor
3164                getDescriptorForType() {
3165              return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_FileUnderConstructionFeature_descriptor;
3166            }
3167    
3168            public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature getDefaultInstanceForType() {
3169              return org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature.getDefaultInstance();
3170            }
3171    
3172            public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature build() {
3173              org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature result = buildPartial();
3174              if (!result.isInitialized()) {
3175                throw newUninitializedMessageException(result);
3176              }
3177              return result;
3178            }
3179    
3180            public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature buildPartial() {
3181              org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature result = new org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature(this);
3182              int from_bitField0_ = bitField0_;
3183              int to_bitField0_ = 0;
3184              if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
3185                to_bitField0_ |= 0x00000001;
3186              }
3187              result.clientName_ = clientName_;
3188              if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
3189                to_bitField0_ |= 0x00000002;
3190              }
3191              result.clientMachine_ = clientMachine_;
3192              result.bitField0_ = to_bitField0_;
3193              onBuilt();
3194              return result;
3195            }
3196    
3197            public Builder mergeFrom(com.google.protobuf.Message other) {
3198              if (other instanceof org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature) {
3199                return mergeFrom((org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature)other);
3200              } else {
3201                super.mergeFrom(other);
3202                return this;
3203              }
3204            }
3205    
3206            public Builder mergeFrom(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature other) {
3207              if (other == org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature.getDefaultInstance()) return this;
3208              if (other.hasClientName()) {
3209                bitField0_ |= 0x00000001;
3210                clientName_ = other.clientName_;
3211                onChanged();
3212              }
3213              if (other.hasClientMachine()) {
3214                bitField0_ |= 0x00000002;
3215                clientMachine_ = other.clientMachine_;
3216                onChanged();
3217              }
3218              this.mergeUnknownFields(other.getUnknownFields());
3219              return this;
3220            }
3221    
3222            public final boolean isInitialized() {
3223              return true;
3224            }
3225    
3226            public Builder mergeFrom(
3227                com.google.protobuf.CodedInputStream input,
3228                com.google.protobuf.ExtensionRegistryLite extensionRegistry)
3229                throws java.io.IOException {
3230              org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature parsedMessage = null;
3231              try {
3232                parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
3233              } catch (com.google.protobuf.InvalidProtocolBufferException e) {
3234                parsedMessage = (org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature) e.getUnfinishedMessage();
3235                throw e;
3236              } finally {
3237                if (parsedMessage != null) {
3238                  mergeFrom(parsedMessage);
3239                }
3240              }
3241              return this;
3242            }
3243            private int bitField0_;
3244    
3245            // optional string clientName = 1;
3246            private java.lang.Object clientName_ = "";
3247            /**
3248             * <code>optional string clientName = 1;</code>
3249             */
3250            public boolean hasClientName() {
3251              return ((bitField0_ & 0x00000001) == 0x00000001);
3252            }
3253            /**
3254             * <code>optional string clientName = 1;</code>
3255             */
3256            public java.lang.String getClientName() {
3257              java.lang.Object ref = clientName_;
3258              if (!(ref instanceof java.lang.String)) {
3259                java.lang.String s = ((com.google.protobuf.ByteString) ref)
3260                    .toStringUtf8();
3261                clientName_ = s;
3262                return s;
3263              } else {
3264                return (java.lang.String) ref;
3265              }
3266            }
3267            /**
3268             * <code>optional string clientName = 1;</code>
3269             */
3270            public com.google.protobuf.ByteString
3271                getClientNameBytes() {
3272              java.lang.Object ref = clientName_;
3273              if (ref instanceof String) {
3274                com.google.protobuf.ByteString b = 
3275                    com.google.protobuf.ByteString.copyFromUtf8(
3276                        (java.lang.String) ref);
3277                clientName_ = b;
3278                return b;
3279              } else {
3280                return (com.google.protobuf.ByteString) ref;
3281              }
3282            }
3283            /**
3284             * <code>optional string clientName = 1;</code>
3285             */
3286            public Builder setClientName(
3287                java.lang.String value) {
3288              if (value == null) {
3289        throw new NullPointerException();
3290      }
3291      bitField0_ |= 0x00000001;
3292              clientName_ = value;
3293              onChanged();
3294              return this;
3295            }
3296            /**
3297             * <code>optional string clientName = 1;</code>
3298             */
3299            public Builder clearClientName() {
3300              bitField0_ = (bitField0_ & ~0x00000001);
3301              clientName_ = getDefaultInstance().getClientName();
3302              onChanged();
3303              return this;
3304            }
3305            /**
3306             * <code>optional string clientName = 1;</code>
3307             */
3308            public Builder setClientNameBytes(
3309                com.google.protobuf.ByteString value) {
3310              if (value == null) {
3311        throw new NullPointerException();
3312      }
3313      bitField0_ |= 0x00000001;
3314              clientName_ = value;
3315              onChanged();
3316              return this;
3317            }
3318    
3319            // optional string clientMachine = 2;
3320            private java.lang.Object clientMachine_ = "";
3321            /**
3322             * <code>optional string clientMachine = 2;</code>
3323             */
3324            public boolean hasClientMachine() {
3325              return ((bitField0_ & 0x00000002) == 0x00000002);
3326            }
3327            /**
3328             * <code>optional string clientMachine = 2;</code>
3329             */
3330            public java.lang.String getClientMachine() {
3331              java.lang.Object ref = clientMachine_;
3332              if (!(ref instanceof java.lang.String)) {
3333                java.lang.String s = ((com.google.protobuf.ByteString) ref)
3334                    .toStringUtf8();
3335                clientMachine_ = s;
3336                return s;
3337              } else {
3338                return (java.lang.String) ref;
3339              }
3340            }
3341            /**
3342             * <code>optional string clientMachine = 2;</code>
3343             */
3344            public com.google.protobuf.ByteString
3345                getClientMachineBytes() {
3346              java.lang.Object ref = clientMachine_;
3347              if (ref instanceof String) {
3348                com.google.protobuf.ByteString b = 
3349                    com.google.protobuf.ByteString.copyFromUtf8(
3350                        (java.lang.String) ref);
3351                clientMachine_ = b;
3352                return b;
3353              } else {
3354                return (com.google.protobuf.ByteString) ref;
3355              }
3356            }
3357            /**
3358             * <code>optional string clientMachine = 2;</code>
3359             */
3360            public Builder setClientMachine(
3361                java.lang.String value) {
3362              if (value == null) {
3363        throw new NullPointerException();
3364      }
3365      bitField0_ |= 0x00000002;
3366              clientMachine_ = value;
3367              onChanged();
3368              return this;
3369            }
3370            /**
3371             * <code>optional string clientMachine = 2;</code>
3372             */
3373            public Builder clearClientMachine() {
3374              bitField0_ = (bitField0_ & ~0x00000002);
3375              clientMachine_ = getDefaultInstance().getClientMachine();
3376              onChanged();
3377              return this;
3378            }
3379            /**
3380             * <code>optional string clientMachine = 2;</code>
3381             */
3382            public Builder setClientMachineBytes(
3383                com.google.protobuf.ByteString value) {
3384              if (value == null) {
3385        throw new NullPointerException();
3386      }
3387      bitField0_ |= 0x00000002;
3388              clientMachine_ = value;
3389              onChanged();
3390              return this;
3391            }
3392    
3393            // @@protoc_insertion_point(builder_scope:hadoop.hdfs.fsimage.INodeSection.FileUnderConstructionFeature)
3394          }
3395    
      // Eagerly builds the shared default (empty) instance at class-load
      // time; the no-init constructor skips field setup, so initFields()
      // must run explicitly.
      static {
        defaultInstance = new FileUnderConstructionFeature(true);
        defaultInstance.initFields();
      }
3400    
3401          // @@protoc_insertion_point(class_scope:hadoop.hdfs.fsimage.INodeSection.FileUnderConstructionFeature)
3402        }
3403    
    // Read-only accessor interface shared by AclFeatureProto and its Builder.
    public interface AclFeatureProtoOrBuilder
        extends com.google.protobuf.MessageOrBuilder {

      // repeated fixed32 entries = 2 [packed = true];
      /**
       * <code>repeated fixed32 entries = 2 [packed = true];</code>
       *
       * <pre>
       **
       * An ACL entry is represented by a 32-bit integer in Big Endian
       * format. The bits can be divided in four segments:
       * [0:2) || [2:26) || [26:27) || [27:29) || [29:32)
       *
       * [0:2) -- reserved for future uses.
       * [2:26) -- the name of the entry, which is an ID that points to a
       * string in the StringTableSection.
       * [26:27) -- the scope of the entry (AclEntryScopeProto)
       * [27:29) -- the type of the entry (AclEntryTypeProto)
       * [29:32) -- the permission of the entry (FsActionProto)
       * </pre>
       */
      java.util.List<java.lang.Integer> getEntriesList();
      /**
       * <code>repeated fixed32 entries = 2 [packed = true];</code>
       *
       * <pre>
       **
       * An ACL entry is represented by a 32-bit integer in Big Endian
       * format. The bits can be divided in four segments:
       * [0:2) || [2:26) || [26:27) || [27:29) || [29:32)
       *
       * [0:2) -- reserved for future uses.
       * [2:26) -- the name of the entry, which is an ID that points to a
       * string in the StringTableSection.
       * [26:27) -- the scope of the entry (AclEntryScopeProto)
       * [27:29) -- the type of the entry (AclEntryTypeProto)
       * [29:32) -- the permission of the entry (FsActionProto)
       * </pre>
       */
      int getEntriesCount();
      /**
       * <code>repeated fixed32 entries = 2 [packed = true];</code>
       *
       * <pre>
       **
       * An ACL entry is represented by a 32-bit integer in Big Endian
       * format. The bits can be divided in four segments:
       * [0:2) || [2:26) || [26:27) || [27:29) || [29:32)
       *
       * [0:2) -- reserved for future uses.
       * [2:26) -- the name of the entry, which is an ID that points to a
       * string in the StringTableSection.
       * [26:27) -- the scope of the entry (AclEntryScopeProto)
       * [27:29) -- the type of the entry (AclEntryTypeProto)
       * [29:32) -- the permission of the entry (FsActionProto)
       * </pre>
       */
      int getEntries(int index);
    }
3463        /**
3464         * Protobuf type {@code hadoop.hdfs.fsimage.INodeSection.AclFeatureProto}
3465         */
3466        public static final class AclFeatureProto extends
3467            com.google.protobuf.GeneratedMessage
3468            implements AclFeatureProtoOrBuilder {
      // Use AclFeatureProto.newBuilder() to construct.
      private AclFeatureProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
        super(builder);
        this.unknownFields = builder.getUnknownFields();
      }
      // No-init constructor used only to create the default instance.
      private AclFeatureProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

      // Shared immutable default (empty) instance, set in the static block.
      private static final AclFeatureProto defaultInstance;
      public static AclFeatureProto getDefaultInstance() {
        return defaultInstance;
      }

      public AclFeatureProto getDefaultInstanceForType() {
        return defaultInstance;
      }
3484    
      // Fields present on the wire but not defined in this schema version.
      private final com.google.protobuf.UnknownFieldSet unknownFields;
      @java.lang.Override
      public final com.google.protobuf.UnknownFieldSet
          getUnknownFields() {
        return this.unknownFields;
      }
      /**
       * Parsing constructor: reads tag/value pairs from {@code input} until
       * end-of-message (tag 0), accepting the {@code entries} field in both
       * packed and unpacked encodings. Note the switch lists {@code default}
       * before the cases; Java switch dispatch is by value, not order, so
       * this is equivalent to the usual layout.
       */
      private AclFeatureProto(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        initFields();
        int mutable_bitField0_ = 0;
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
            com.google.protobuf.UnknownFieldSet.newBuilder();
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              default: {
                // Unrecognized tag: preserve it in unknownFields; a false
                // return means end-of-group, which also terminates parsing.
                if (!parseUnknownField(input, unknownFields,
                                       extensionRegistry, tag)) {
                  done = true;
                }
                break;
              }
              // Tag 21 = field 2, wire type 5 (fixed32): a single
              // unpacked entry.
              case 21: {
                if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
                  entries_ = new java.util.ArrayList<java.lang.Integer>();
                  mutable_bitField0_ |= 0x00000001;
                }
                entries_.add(input.readFixed32());
                break;
              }
              // Tag 18 = field 2, wire type 2 (length-delimited): the
              // packed encoding — a length prefix then raw fixed32 values.
              case 18: {
                int length = input.readRawVarint32();
                int limit = input.pushLimit(length);
                if (!((mutable_bitField0_ & 0x00000001) == 0x00000001) && input.getBytesUntilLimit() > 0) {
                  entries_ = new java.util.ArrayList<java.lang.Integer>();
                  mutable_bitField0_ |= 0x00000001;
                }
                while (input.getBytesUntilLimit() > 0) {
                  entries_.add(input.readFixed32());
                }
                input.popLimit(limit);
                break;
              }
            }
          }
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(this);
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(
              e.getMessage()).setUnfinishedMessage(this);
        } finally {
          // Freeze the list (even on error) so the message stays immutable.
          if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
            entries_ = java.util.Collections.unmodifiableList(entries_);
          }
          this.unknownFields = unknownFields.build();
          makeExtensionsImmutable();
        }
      }
      // Descriptor / reflection plumbing for AclFeatureProto.
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_AclFeatureProto_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_AclFeatureProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto.Builder.class);
      }
3561    
      // Stateless parser singleton; delegates to the parsing constructor.
      public static com.google.protobuf.Parser<AclFeatureProto> PARSER =
          new com.google.protobuf.AbstractParser<AclFeatureProto>() {
        public AclFeatureProto parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          return new AclFeatureProto(input, extensionRegistry);
        }
      };

      @java.lang.Override
      public com.google.protobuf.Parser<AclFeatureProto> getParserForType() {
        return PARSER;
      }
3576    
      // repeated fixed32 entries = 2 [packed = true];
      public static final int ENTRIES_FIELD_NUMBER = 2;
      // Unmodifiable after construction (see parsing ctor's finally block).
      private java.util.List<java.lang.Integer> entries_;
      /**
       * <code>repeated fixed32 entries = 2 [packed = true];</code>
       *
       * <pre>
       **
       * An ACL entry is represented by a 32-bit integer in Big Endian
       * format. The bits can be divided in four segments:
       * [0:2) || [2:26) || [26:27) || [27:29) || [29:32)
       *
       * [0:2) -- reserved for future uses.
       * [2:26) -- the name of the entry, which is an ID that points to a
       * string in the StringTableSection.
       * [26:27) -- the scope of the entry (AclEntryScopeProto)
       * [27:29) -- the type of the entry (AclEntryTypeProto)
       * [29:32) -- the permission of the entry (FsActionProto)
       * </pre>
       */
      public java.util.List<java.lang.Integer>
          getEntriesList() {
        return entries_;
      }
      /**
       * <code>repeated fixed32 entries = 2 [packed = true];</code>
       *
       * <pre>
       **
       * An ACL entry is represented by a 32-bit integer in Big Endian
       * format. The bits can be divided in four segments:
       * [0:2) || [2:26) || [26:27) || [27:29) || [29:32)
       *
       * [0:2) -- reserved for future uses.
       * [2:26) -- the name of the entry, which is an ID that points to a
       * string in the StringTableSection.
       * [26:27) -- the scope of the entry (AclEntryScopeProto)
       * [27:29) -- the type of the entry (AclEntryTypeProto)
       * [29:32) -- the permission of the entry (FsActionProto)
       * </pre>
       */
      public int getEntriesCount() {
        return entries_.size();
      }
      /**
       * <code>repeated fixed32 entries = 2 [packed = true];</code>
       *
       * <pre>
       **
       * An ACL entry is represented by a 32-bit integer in Big Endian
       * format. The bits can be divided in four segments:
       * [0:2) || [2:26) || [26:27) || [27:29) || [29:32)
       *
       * [0:2) -- reserved for future uses.
       * [2:26) -- the name of the entry, which is an ID that points to a
       * string in the StringTableSection.
       * [26:27) -- the scope of the entry (AclEntryScopeProto)
       * [27:29) -- the type of the entry (AclEntryTypeProto)
       * [29:32) -- the permission of the entry (FsActionProto)
       * </pre>
       */
      public int getEntries(int index) {
        return entries_.get(index);
      }
      // Cached byte size of the packed entries payload (excluding tag and
      // length prefix); written by getSerializedSize(), read by writeTo().
      private int entriesMemoizedSerializedSize = -1;

      private void initFields() {
        entries_ = java.util.Collections.emptyList();
      }
3646          private byte memoizedIsInitialized = -1;
3647          public final boolean isInitialized() {
3648            byte isInitialized = memoizedIsInitialized;
3649            if (isInitialized != -1) return isInitialized == 1;
3650    
3651            memoizedIsInitialized = 1;
3652            return true;
3653          }
3654    
      /**
       * Serializes the packed {@code entries} field: tag byte 18
       * (field 2, wire type 2), a varint payload length, then the raw
       * fixed32 values with no per-element tags.
       */
      public void writeTo(com.google.protobuf.CodedOutputStream output)
                          throws java.io.IOException {
        // Must run first: it populates entriesMemoizedSerializedSize,
        // which is written as the packed length below.
        getSerializedSize();
        if (getEntriesList().size() > 0) {
          output.writeRawVarint32(18);
          output.writeRawVarint32(entriesMemoizedSerializedSize);
        }
        for (int i = 0; i < entries_.size(); i++) {
          output.writeFixed32NoTag(entries_.get(i));
        }
        getUnknownFields().writeTo(output);
      }
3667    
      // Memoized total serialized size; -1 means "not computed yet".
      private int memoizedSerializedSize = -1;
      // Computes (and caches) the serialized size. Side effect: stores the
      // packed payload size in entriesMemoizedSerializedSize for writeTo().
      public int getSerializedSize() {
        int size = memoizedSerializedSize;
        if (size != -1) return size;

        size = 0;
        {
          // Each fixed32 entry is exactly 4 bytes.
          int dataSize = 0;
          dataSize = 4 * getEntriesList().size();
          size += dataSize;
          if (!getEntriesList().isEmpty()) {
            // 1 byte for the tag, plus the varint length prefix.
            size += 1;
            size += com.google.protobuf.CodedOutputStream
                .computeInt32SizeNoTag(dataSize);
          }
          entriesMemoizedSerializedSize = dataSize;
        }
        size += getUnknownFields().getSerializedSize();
        memoizedSerializedSize = size;
        return size;
      }
3689    
      private static final long serialVersionUID = 0L;
      // Java serialization hook; delegates to GeneratedMessage, which
      // substitutes a serialized-proto proxy object.
      @java.lang.Override
      protected java.lang.Object writeReplace()
          throws java.io.ObjectStreamException {
        return super.writeReplace();
      }
3696    
      // ------------------------------------------------------------------
      // Static parsing entry points. All delegate to PARSER; the
      // ExtensionRegistryLite overloads let extension fields be recognized
      // while parsing.
      // ------------------------------------------------------------------
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto parseFrom(
          com.google.protobuf.ByteString data)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto parseFrom(
          com.google.protobuf.ByteString data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto parseFrom(byte[] data)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto parseFrom(
          byte[] data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto parseFrom(java.io.InputStream input)
          throws java.io.IOException {
        return PARSER.parseFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto parseFrom(
          java.io.InputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseFrom(input, extensionRegistry);
      }
      // Delimited variants read a varint length prefix before the message.
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto parseDelimitedFrom(java.io.InputStream input)
          throws java.io.IOException {
        return PARSER.parseDelimitedFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto parseDelimitedFrom(
          java.io.InputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseDelimitedFrom(input, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto parseFrom(
          com.google.protobuf.CodedInputStream input)
          throws java.io.IOException {
        return PARSER.parseFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto parseFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseFrom(input, extensionRegistry);
      }
3749    
3750          public static Builder newBuilder() { return Builder.create(); }
3751          public Builder newBuilderForType() { return newBuilder(); }
3752          public static Builder newBuilder(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto prototype) {
3753            return newBuilder().mergeFrom(prototype);
3754          }
3755          public Builder toBuilder() { return newBuilder(this); }
3756    
3757          @java.lang.Override
3758          protected Builder newBuilderForType(
3759              com.google.protobuf.GeneratedMessage.BuilderParent parent) {
3760            Builder builder = new Builder(parent);
3761            return builder;
3762          }
3763          /**
3764           * Protobuf type {@code hadoop.hdfs.fsimage.INodeSection.AclFeatureProto}
3765           */
3766          public static final class Builder extends
3767              com.google.protobuf.GeneratedMessage.Builder<Builder>
3768             implements org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProtoOrBuilder {
3769            public static final com.google.protobuf.Descriptors.Descriptor
3770                getDescriptor() {
3771              return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_AclFeatureProto_descriptor;
3772            }
3773    
3774            protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
3775                internalGetFieldAccessorTable() {
3776              return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_AclFeatureProto_fieldAccessorTable
3777                  .ensureFieldAccessorsInitialized(
3778                      org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto.Builder.class);
3779            }
3780    
3781            // Construct using org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto.newBuilder()
3782            private Builder() {
3783              maybeForceBuilderInitialization();
3784            }
3785    
3786            private Builder(
3787                com.google.protobuf.GeneratedMessage.BuilderParent parent) {
3788              super(parent);
3789              maybeForceBuilderInitialization();
3790            }
3791            private void maybeForceBuilderInitialization() {
3792              if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
3793              }
3794            }
3795            private static Builder create() {
3796              return new Builder();
3797            }
3798    
3799            public Builder clear() {
3800              super.clear();
3801              entries_ = java.util.Collections.emptyList();
3802              bitField0_ = (bitField0_ & ~0x00000001);
3803              return this;
3804            }
3805    
3806            public Builder clone() {
3807              return create().mergeFrom(buildPartial());
3808            }
3809    
3810            public com.google.protobuf.Descriptors.Descriptor
3811                getDescriptorForType() {
3812              return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_AclFeatureProto_descriptor;
3813            }
3814    
3815            public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto getDefaultInstanceForType() {
3816              return org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto.getDefaultInstance();
3817            }
3818    
3819            public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto build() {
3820              org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto result = buildPartial();
3821              if (!result.isInitialized()) {
3822                throw newUninitializedMessageException(result);
3823              }
3824              return result;
3825            }
3826    
3827            public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto buildPartial() {
3828              org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto result = new org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto(this);
3829              int from_bitField0_ = bitField0_;
3830              if (((bitField0_ & 0x00000001) == 0x00000001)) {
3831                entries_ = java.util.Collections.unmodifiableList(entries_);
3832                bitField0_ = (bitField0_ & ~0x00000001);
3833              }
3834              result.entries_ = entries_;
3835              onBuilt();
3836              return result;
3837            }
3838    
3839            public Builder mergeFrom(com.google.protobuf.Message other) {
3840              if (other instanceof org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto) {
3841                return mergeFrom((org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto)other);
3842              } else {
3843                super.mergeFrom(other);
3844                return this;
3845              }
3846            }
3847    
3848            public Builder mergeFrom(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto other) {
3849              if (other == org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto.getDefaultInstance()) return this;
3850              if (!other.entries_.isEmpty()) {
3851                if (entries_.isEmpty()) {
3852                  entries_ = other.entries_;
3853                  bitField0_ = (bitField0_ & ~0x00000001);
3854                } else {
3855                  ensureEntriesIsMutable();
3856                  entries_.addAll(other.entries_);
3857                }
3858                onChanged();
3859              }
3860              this.mergeUnknownFields(other.getUnknownFields());
3861              return this;
3862            }
3863    
3864            public final boolean isInitialized() {
3865              return true;
3866            }
3867    
3868            public Builder mergeFrom(
3869                com.google.protobuf.CodedInputStream input,
3870                com.google.protobuf.ExtensionRegistryLite extensionRegistry)
3871                throws java.io.IOException {
3872              org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto parsedMessage = null;
3873              try {
3874                parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
3875              } catch (com.google.protobuf.InvalidProtocolBufferException e) {
3876                parsedMessage = (org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto) e.getUnfinishedMessage();
3877                throw e;
3878              } finally {
3879                if (parsedMessage != null) {
3880                  mergeFrom(parsedMessage);
3881                }
3882              }
3883              return this;
3884            }
3885            private int bitField0_;
3886    
3887            // repeated fixed32 entries = 2 [packed = true];
3888            private java.util.List<java.lang.Integer> entries_ = java.util.Collections.emptyList();
3889            private void ensureEntriesIsMutable() {
3890              if (!((bitField0_ & 0x00000001) == 0x00000001)) {
3891                entries_ = new java.util.ArrayList<java.lang.Integer>(entries_);
3892                bitField0_ |= 0x00000001;
3893               }
3894            }
3895            /**
3896             * <code>repeated fixed32 entries = 2 [packed = true];</code>
3897             *
3898             * <pre>
3899             **
3900             * An ACL entry is represented by a 32-bit integer in Big Endian
3901             * format. The bits can be divided in four segments:
3902             * [0:2) || [2:26) || [26:27) || [27:29) || [29:32)
3903             *
3904             * [0:2) -- reserved for futute uses.
3905             * [2:26) -- the name of the entry, which is an ID that points to a
3906             * string in the StringTableSection.
3907             * [26:27) -- the scope of the entry (AclEntryScopeProto)
3908             * [27:29) -- the type of the entry (AclEntryTypeProto)
3909             * [29:32) -- the permission of the entry (FsActionProto)
3910             * </pre>
3911             */
3912            public java.util.List<java.lang.Integer>
3913                getEntriesList() {
3914              return java.util.Collections.unmodifiableList(entries_);
3915            }
3916            /**
3917             * <code>repeated fixed32 entries = 2 [packed = true];</code>
3918             *
3919             * <pre>
3920             **
3921             * An ACL entry is represented by a 32-bit integer in Big Endian
3922             * format. The bits can be divided in four segments:
3923             * [0:2) || [2:26) || [26:27) || [27:29) || [29:32)
3924             *
3925             * [0:2) -- reserved for futute uses.
3926             * [2:26) -- the name of the entry, which is an ID that points to a
3927             * string in the StringTableSection.
3928             * [26:27) -- the scope of the entry (AclEntryScopeProto)
3929             * [27:29) -- the type of the entry (AclEntryTypeProto)
3930             * [29:32) -- the permission of the entry (FsActionProto)
3931             * </pre>
3932             */
3933            public int getEntriesCount() {
3934              return entries_.size();
3935            }
3936            /**
3937             * <code>repeated fixed32 entries = 2 [packed = true];</code>
3938             *
3939             * <pre>
3940             **
3941             * An ACL entry is represented by a 32-bit integer in Big Endian
3942             * format. The bits can be divided in four segments:
3943             * [0:2) || [2:26) || [26:27) || [27:29) || [29:32)
3944             *
3945             * [0:2) -- reserved for futute uses.
3946             * [2:26) -- the name of the entry, which is an ID that points to a
3947             * string in the StringTableSection.
3948             * [26:27) -- the scope of the entry (AclEntryScopeProto)
3949             * [27:29) -- the type of the entry (AclEntryTypeProto)
3950             * [29:32) -- the permission of the entry (FsActionProto)
3951             * </pre>
3952             */
3953            public int getEntries(int index) {
3954              return entries_.get(index);
3955            }
3956            /**
3957             * <code>repeated fixed32 entries = 2 [packed = true];</code>
3958             *
3959             * <pre>
3960             **
3961             * An ACL entry is represented by a 32-bit integer in Big Endian
3962             * format. The bits can be divided in four segments:
3963             * [0:2) || [2:26) || [26:27) || [27:29) || [29:32)
3964             *
3965             * [0:2) -- reserved for futute uses.
3966             * [2:26) -- the name of the entry, which is an ID that points to a
3967             * string in the StringTableSection.
3968             * [26:27) -- the scope of the entry (AclEntryScopeProto)
3969             * [27:29) -- the type of the entry (AclEntryTypeProto)
3970             * [29:32) -- the permission of the entry (FsActionProto)
3971             * </pre>
3972             */
3973            public Builder setEntries(
3974                int index, int value) {
3975              ensureEntriesIsMutable();
3976              entries_.set(index, value);
3977              onChanged();
3978              return this;
3979            }
3980            /**
3981             * <code>repeated fixed32 entries = 2 [packed = true];</code>
3982             *
3983             * <pre>
3984             **
3985             * An ACL entry is represented by a 32-bit integer in Big Endian
3986             * format. The bits can be divided in four segments:
3987             * [0:2) || [2:26) || [26:27) || [27:29) || [29:32)
3988             *
3989             * [0:2) -- reserved for futute uses.
3990             * [2:26) -- the name of the entry, which is an ID that points to a
3991             * string in the StringTableSection.
3992             * [26:27) -- the scope of the entry (AclEntryScopeProto)
3993             * [27:29) -- the type of the entry (AclEntryTypeProto)
3994             * [29:32) -- the permission of the entry (FsActionProto)
3995             * </pre>
3996             */
3997            public Builder addEntries(int value) {
3998              ensureEntriesIsMutable();
3999              entries_.add(value);
4000              onChanged();
4001              return this;
4002            }
4003            /**
4004             * <code>repeated fixed32 entries = 2 [packed = true];</code>
4005             *
4006             * <pre>
4007             **
4008             * An ACL entry is represented by a 32-bit integer in Big Endian
4009             * format. The bits can be divided in four segments:
4010             * [0:2) || [2:26) || [26:27) || [27:29) || [29:32)
4011             *
4012             * [0:2) -- reserved for futute uses.
4013             * [2:26) -- the name of the entry, which is an ID that points to a
4014             * string in the StringTableSection.
4015             * [26:27) -- the scope of the entry (AclEntryScopeProto)
4016             * [27:29) -- the type of the entry (AclEntryTypeProto)
4017             * [29:32) -- the permission of the entry (FsActionProto)
4018             * </pre>
4019             */
4020            public Builder addAllEntries(
4021                java.lang.Iterable<? extends java.lang.Integer> values) {
4022              ensureEntriesIsMutable();
4023              super.addAll(values, entries_);
4024              onChanged();
4025              return this;
4026            }
4027            /**
4028             * <code>repeated fixed32 entries = 2 [packed = true];</code>
4029             *
4030             * <pre>
4031             **
4032             * An ACL entry is represented by a 32-bit integer in Big Endian
4033             * format. The bits can be divided in four segments:
4034             * [0:2) || [2:26) || [26:27) || [27:29) || [29:32)
4035             *
4036             * [0:2) -- reserved for futute uses.
4037             * [2:26) -- the name of the entry, which is an ID that points to a
4038             * string in the StringTableSection.
4039             * [26:27) -- the scope of the entry (AclEntryScopeProto)
4040             * [27:29) -- the type of the entry (AclEntryTypeProto)
4041             * [29:32) -- the permission of the entry (FsActionProto)
4042             * </pre>
4043             */
4044            public Builder clearEntries() {
4045              entries_ = java.util.Collections.emptyList();
4046              bitField0_ = (bitField0_ & ~0x00000001);
4047              onChanged();
4048              return this;
4049            }
4050    
4051            // @@protoc_insertion_point(builder_scope:hadoop.hdfs.fsimage.INodeSection.AclFeatureProto)
4052          }
4053    
      // Eagerly creates the shared default instance; the no-op constructor
      // (noInit=true) skips stream parsing, then fields get their defaults.
      static {
        defaultInstance = new AclFeatureProto(true);
        defaultInstance.initFields();
      }
4058    
4059          // @@protoc_insertion_point(class_scope:hadoop.hdfs.fsimage.INodeSection.AclFeatureProto)
4060        }
4061    
    /**
     * Accessor contract for {@code hadoop.hdfs.fsimage.INodeSection.INodeFile},
     * implemented by both the message class and its builder. Each optional
     * field pairs a {@code hasX()} presence check with a {@code getX()}
     * accessor; the repeated {@code blocks} field gets list/count/index
     * accessors plus OrBuilder views.
     */
    public interface INodeFileOrBuilder
        extends com.google.protobuf.MessageOrBuilder {

      // optional uint32 replication = 1;
      /**
       * <code>optional uint32 replication = 1;</code>
       */
      boolean hasReplication();
      /**
       * <code>optional uint32 replication = 1;</code>
       */
      int getReplication();

      // optional uint64 modificationTime = 2;
      /**
       * <code>optional uint64 modificationTime = 2;</code>
       */
      boolean hasModificationTime();
      /**
       * <code>optional uint64 modificationTime = 2;</code>
       */
      long getModificationTime();

      // optional uint64 accessTime = 3;
      /**
       * <code>optional uint64 accessTime = 3;</code>
       */
      boolean hasAccessTime();
      /**
       * <code>optional uint64 accessTime = 3;</code>
       */
      long getAccessTime();

      // optional uint64 preferredBlockSize = 4;
      /**
       * <code>optional uint64 preferredBlockSize = 4;</code>
       */
      boolean hasPreferredBlockSize();
      /**
       * <code>optional uint64 preferredBlockSize = 4;</code>
       */
      long getPreferredBlockSize();

      // optional fixed64 permission = 5;
      /**
       * <code>optional fixed64 permission = 5;</code>
       */
      boolean hasPermission();
      /**
       * <code>optional fixed64 permission = 5;</code>
       */
      long getPermission();

      // repeated .hadoop.hdfs.BlockProto blocks = 6;
      /**
       * <code>repeated .hadoop.hdfs.BlockProto blocks = 6;</code>
       */
      java.util.List<org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto> 
          getBlocksList();
      /**
       * <code>repeated .hadoop.hdfs.BlockProto blocks = 6;</code>
       */
      org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto getBlocks(int index);
      /**
       * <code>repeated .hadoop.hdfs.BlockProto blocks = 6;</code>
       */
      int getBlocksCount();
      /**
       * <code>repeated .hadoop.hdfs.BlockProto blocks = 6;</code>
       */
      java.util.List<? extends org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProtoOrBuilder> 
          getBlocksOrBuilderList();
      /**
       * <code>repeated .hadoop.hdfs.BlockProto blocks = 6;</code>
       */
      org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProtoOrBuilder getBlocksOrBuilder(
          int index);

      // optional .hadoop.hdfs.fsimage.INodeSection.FileUnderConstructionFeature fileUC = 7;
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.FileUnderConstructionFeature fileUC = 7;</code>
       */
      boolean hasFileUC();
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.FileUnderConstructionFeature fileUC = 7;</code>
       */
      org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature getFileUC();
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.FileUnderConstructionFeature fileUC = 7;</code>
       */
      org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeatureOrBuilder getFileUCOrBuilder();

      // optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 8;
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 8;</code>
       */
      boolean hasAcl();
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 8;</code>
       */
      org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto getAcl();
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 8;</code>
       */
      org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProtoOrBuilder getAclOrBuilder();
    }
4168        /**
4169         * Protobuf type {@code hadoop.hdfs.fsimage.INodeSection.INodeFile}
4170         */
4171        public static final class INodeFile extends
4172            com.google.protobuf.GeneratedMessage
4173            implements INodeFileOrBuilder {
      // Use INodeFile.newBuilder() to construct.
      // Copies unknown fields from the builder; field values are assigned by
      // Builder.buildPartial().
      private INodeFile(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
        super(builder);
        this.unknownFields = builder.getUnknownFields();
      }
      // Bare constructor for the shared default instance (no parsing).
      private INodeFile(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
4180    
      // Shared immutable default instance; assigned in the class's static
      // initializer (outside this view) -- presumably mirroring the
      // AclFeatureProto static block above.
      private static final INodeFile defaultInstance;
      public static INodeFile getDefaultInstance() {
        return defaultInstance;
      }

      public INodeFile getDefaultInstanceForType() {
        return defaultInstance;
      }
4189    
      // Fields read off the wire whose tags this generated class does not know;
      // preserved so re-serialization round-trips them.
      private final com.google.protobuf.UnknownFieldSet unknownFields;
      @java.lang.Override
      public final com.google.protobuf.UnknownFieldSet
          getUnknownFields() {
        return this.unknownFields;
      }
      // Wire-format parsing constructor. Tag values are
      // (fieldNumber << 3) | wireType; tag 0 marks end of input.
      // Note: the `default:` arm preceding the `case` labels is legal Java --
      // switch dispatch is by value, not label order.
      private INodeFile(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        initFields();
        int mutable_bitField0_ = 0;
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
            com.google.protobuf.UnknownFieldSet.newBuilder();
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              default: {
                // Unrecognized tag: stash it in unknownFields; stop on
                // end-group or other terminating conditions.
                if (!parseUnknownField(input, unknownFields,
                                       extensionRegistry, tag)) {
                  done = true;
                }
                break;
              }
              case 8: {  // field 1, varint: replication
                bitField0_ |= 0x00000001;
                replication_ = input.readUInt32();
                break;
              }
              case 16: {  // field 2, varint: modificationTime
                bitField0_ |= 0x00000002;
                modificationTime_ = input.readUInt64();
                break;
              }
              case 24: {  // field 3, varint: accessTime
                bitField0_ |= 0x00000004;
                accessTime_ = input.readUInt64();
                break;
              }
              case 32: {  // field 4, varint: preferredBlockSize
                bitField0_ |= 0x00000008;
                preferredBlockSize_ = input.readUInt64();
                break;
              }
              case 41: {  // field 5, fixed64: permission
                bitField0_ |= 0x00000010;
                permission_ = input.readFixed64();
                break;
              }
              case 50: {  // field 6, length-delimited: repeated blocks
                // Lazily create the list on the first occurrence; tracked in
                // mutable_bitField0_, not bitField0_ (repeated fields have no
                // presence bit).
                if (!((mutable_bitField0_ & 0x00000020) == 0x00000020)) {
                  blocks_ = new java.util.ArrayList<org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto>();
                  mutable_bitField0_ |= 0x00000020;
                }
                blocks_.add(input.readMessage(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto.PARSER, extensionRegistry));
                break;
              }
              case 58: {  // field 7, length-delimited: fileUC
                // On a duplicate occurrence, merge into the previous value
                // per proto2 last-message-merge semantics.
                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature.Builder subBuilder = null;
                if (((bitField0_ & 0x00000020) == 0x00000020)) {
                  subBuilder = fileUC_.toBuilder();
                }
                fileUC_ = input.readMessage(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature.PARSER, extensionRegistry);
                if (subBuilder != null) {
                  subBuilder.mergeFrom(fileUC_);
                  fileUC_ = subBuilder.buildPartial();
                }
                bitField0_ |= 0x00000020;
                break;
              }
              case 66: {  // field 8, length-delimited: acl
                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto.Builder subBuilder = null;
                if (((bitField0_ & 0x00000040) == 0x00000040)) {
                  subBuilder = acl_.toBuilder();
                }
                acl_ = input.readMessage(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto.PARSER, extensionRegistry);
                if (subBuilder != null) {
                  subBuilder.mergeFrom(acl_);
                  acl_ = subBuilder.buildPartial();
                }
                bitField0_ |= 0x00000040;
                break;
              }
            }
          }
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(this);
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(
              e.getMessage()).setUnfinishedMessage(this);
        } finally {
          // Freeze the repeated list and unknown fields even on failure, so
          // the unfinished message attached to the exception is consistent.
          if (((mutable_bitField0_ & 0x00000020) == 0x00000020)) {
            blocks_ = java.util.Collections.unmodifiableList(blocks_);
          }
          this.unknownFields = unknownFields.build();
          makeExtensionsImmutable();
        }
      }
      // Reflection support: descriptor and field-accessor table for this type.
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_INodeFile_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_INodeFile_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile.Builder.class);
      }
4304    
      // Stateless parser shared by all parseFrom() entry points below; each
      // call delegates to the wire-parsing constructor.
      public static com.google.protobuf.Parser<INodeFile> PARSER =
          new com.google.protobuf.AbstractParser<INodeFile>() {
        public INodeFile parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          return new INodeFile(input, extensionRegistry);
        }
      };
4314    
      // Required by the GeneratedMessage contract: exposes this message type's
      // parser instance.
      @java.lang.Override
      public com.google.protobuf.Parser<INodeFile> getParserForType() {
        return PARSER;
      }
4319    
      // One bit per optional field, recording explicit presence for hasXxx().
      // Message bit layout: 0x01 replication, 0x02 modificationTime,
      // 0x04 accessTime, 0x08 preferredBlockSize, 0x10 permission,
      // 0x20 fileUC, 0x40 acl (repeated 'blocks' has no bit).
      private int bitField0_;
      // optional uint32 replication = 1;
      public static final int REPLICATION_FIELD_NUMBER = 1;
      private int replication_;
      /**
       * <code>optional uint32 replication = 1;</code>
       */
      public boolean hasReplication() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>optional uint32 replication = 1;</code>
       */
      public int getReplication() {
        return replication_;
      }
4336    
      // optional uint64 modificationTime = 2;
      // Presence tracked by message bit 0x02.
      public static final int MODIFICATIONTIME_FIELD_NUMBER = 2;
      private long modificationTime_;
      /**
       * <code>optional uint64 modificationTime = 2;</code>
       */
      public boolean hasModificationTime() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>optional uint64 modificationTime = 2;</code>
       */
      public long getModificationTime() {
        return modificationTime_;
      }
4352    
      // optional uint64 accessTime = 3;
      // Presence tracked by message bit 0x04.
      public static final int ACCESSTIME_FIELD_NUMBER = 3;
      private long accessTime_;
      /**
       * <code>optional uint64 accessTime = 3;</code>
       */
      public boolean hasAccessTime() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      /**
       * <code>optional uint64 accessTime = 3;</code>
       */
      public long getAccessTime() {
        return accessTime_;
      }
4368    
      // optional uint64 preferredBlockSize = 4;
      // Presence tracked by message bit 0x08.
      public static final int PREFERREDBLOCKSIZE_FIELD_NUMBER = 4;
      private long preferredBlockSize_;
      /**
       * <code>optional uint64 preferredBlockSize = 4;</code>
       */
      public boolean hasPreferredBlockSize() {
        return ((bitField0_ & 0x00000008) == 0x00000008);
      }
      /**
       * <code>optional uint64 preferredBlockSize = 4;</code>
       */
      public long getPreferredBlockSize() {
        return preferredBlockSize_;
      }
4384    
      // optional fixed64 permission = 5;
      // Presence tracked by message bit 0x10.
      public static final int PERMISSION_FIELD_NUMBER = 5;
      private long permission_;
      /**
       * <code>optional fixed64 permission = 5;</code>
       */
      public boolean hasPermission() {
        return ((bitField0_ & 0x00000010) == 0x00000010);
      }
      /**
       * <code>optional fixed64 permission = 5;</code>
       */
      public long getPermission() {
        return permission_;
      }
4400    
      // repeated .hadoop.hdfs.BlockProto blocks = 6;
      // Repeated field: no has-bit; the list is made unmodifiable once parsing
      // completes, so the views returned below are effectively immutable.
      public static final int BLOCKS_FIELD_NUMBER = 6;
      private java.util.List<org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto> blocks_;
      /**
       * <code>repeated .hadoop.hdfs.BlockProto blocks = 6;</code>
       */
      public java.util.List<org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto> getBlocksList() {
        return blocks_;
      }
      /**
       * <code>repeated .hadoop.hdfs.BlockProto blocks = 6;</code>
       */
      public java.util.List<? extends org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProtoOrBuilder> 
          getBlocksOrBuilderList() {
        return blocks_;
      }
      /**
       * <code>repeated .hadoop.hdfs.BlockProto blocks = 6;</code>
       */
      public int getBlocksCount() {
        return blocks_.size();
      }
      /**
       * <code>repeated .hadoop.hdfs.BlockProto blocks = 6;</code>
       */
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto getBlocks(int index) {
        return blocks_.get(index);
      }
      /**
       * <code>repeated .hadoop.hdfs.BlockProto blocks = 6;</code>
       */
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProtoOrBuilder getBlocksOrBuilder(
          int index) {
        return blocks_.get(index);
      }
4436    
      // optional .hadoop.hdfs.fsimage.INodeSection.FileUnderConstructionFeature fileUC = 7;
      // Presence tracked by message bit 0x20.
      public static final int FILEUC_FIELD_NUMBER = 7;
      private org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature fileUC_;
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.FileUnderConstructionFeature fileUC = 7;</code>
       */
      public boolean hasFileUC() {
        return ((bitField0_ & 0x00000020) == 0x00000020);
      }
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.FileUnderConstructionFeature fileUC = 7;</code>
       */
      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature getFileUC() {
        return fileUC_;
      }
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.FileUnderConstructionFeature fileUC = 7;</code>
       */
      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeatureOrBuilder getFileUCOrBuilder() {
        return fileUC_;
      }
4458    
      // optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 8;
      // Presence tracked by message bit 0x40.
      public static final int ACL_FIELD_NUMBER = 8;
      private org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto acl_;
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 8;</code>
       */
      public boolean hasAcl() {
        return ((bitField0_ & 0x00000040) == 0x00000040);
      }
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 8;</code>
       */
      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto getAcl() {
        return acl_;
      }
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 8;</code>
       */
      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProtoOrBuilder getAclOrBuilder() {
        return acl_;
      }
4480    
      // Resets every field to its proto3-style default (0/empty/default
      // instance); invoked by the generated constructors.
      private void initFields() {
        replication_ = 0;
        modificationTime_ = 0L;
        accessTime_ = 0L;
        preferredBlockSize_ = 0L;
        permission_ = 0L;
        blocks_ = java.util.Collections.emptyList();
        fileUC_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature.getDefaultInstance();
        acl_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto.getDefaultInstance();
      }
      // Memoized initialization check: -1 = not computed, 0 = false, 1 = true.
      private byte memoizedIsInitialized = -1;
      public final boolean isInitialized() {
        byte isInitialized = memoizedIsInitialized;
        if (isInitialized != -1) return isInitialized == 1;

        // All direct fields are optional; only the nested BlockProto messages
        // can carry unsatisfied required fields.
        for (int i = 0; i < getBlocksCount(); i++) {
          if (!getBlocks(i).isInitialized()) {
            memoizedIsInitialized = 0;
            return false;
          }
        }
        memoizedIsInitialized = 1;
        return true;
      }
4505    
      // Serializes set fields to the wire in ascending field-number order.
      // Each optional field is emitted only when its presence bit is set.
      public void writeTo(com.google.protobuf.CodedOutputStream output)
                          throws java.io.IOException {
        // Populate memoizedSerializedSize before writing.
        getSerializedSize();
        if (((bitField0_ & 0x00000001) == 0x00000001)) {
          output.writeUInt32(1, replication_);
        }
        if (((bitField0_ & 0x00000002) == 0x00000002)) {
          output.writeUInt64(2, modificationTime_);
        }
        if (((bitField0_ & 0x00000004) == 0x00000004)) {
          output.writeUInt64(3, accessTime_);
        }
        if (((bitField0_ & 0x00000008) == 0x00000008)) {
          output.writeUInt64(4, preferredBlockSize_);
        }
        if (((bitField0_ & 0x00000010) == 0x00000010)) {
          output.writeFixed64(5, permission_);
        }
        for (int i = 0; i < blocks_.size(); i++) {
          output.writeMessage(6, blocks_.get(i));
        }
        if (((bitField0_ & 0x00000020) == 0x00000020)) {
          output.writeMessage(7, fileUC_);
        }
        if (((bitField0_ & 0x00000040) == 0x00000040)) {
          output.writeMessage(8, acl_);
        }
        // Preserve any fields that were unknown at parse time.
        getUnknownFields().writeTo(output);
      }
4535    
      // Memoized wire size (-1 = not yet computed). Mirrors writeTo(): each
      // set field contributes its encoded size, plus unknown fields.
      private int memoizedSerializedSize = -1;
      public int getSerializedSize() {
        int size = memoizedSerializedSize;
        if (size != -1) return size;

        size = 0;
        if (((bitField0_ & 0x00000001) == 0x00000001)) {
          size += com.google.protobuf.CodedOutputStream
            .computeUInt32Size(1, replication_);
        }
        if (((bitField0_ & 0x00000002) == 0x00000002)) {
          size += com.google.protobuf.CodedOutputStream
            .computeUInt64Size(2, modificationTime_);
        }
        if (((bitField0_ & 0x00000004) == 0x00000004)) {
          size += com.google.protobuf.CodedOutputStream
            .computeUInt64Size(3, accessTime_);
        }
        if (((bitField0_ & 0x00000008) == 0x00000008)) {
          size += com.google.protobuf.CodedOutputStream
            .computeUInt64Size(4, preferredBlockSize_);
        }
        if (((bitField0_ & 0x00000010) == 0x00000010)) {
          size += com.google.protobuf.CodedOutputStream
            .computeFixed64Size(5, permission_);
        }
        for (int i = 0; i < blocks_.size(); i++) {
          size += com.google.protobuf.CodedOutputStream
            .computeMessageSize(6, blocks_.get(i));
        }
        if (((bitField0_ & 0x00000020) == 0x00000020)) {
          size += com.google.protobuf.CodedOutputStream
            .computeMessageSize(7, fileUC_);
        }
        if (((bitField0_ & 0x00000040) == 0x00000040)) {
          size += com.google.protobuf.CodedOutputStream
            .computeMessageSize(8, acl_);
        }
        size += getUnknownFields().getSerializedSize();
        memoizedSerializedSize = size;
        return size;
      }
4578    
      private static final long serialVersionUID = 0L;
      // Java serialization is delegated to the GeneratedMessage superclass.
      @java.lang.Override
      protected java.lang.Object writeReplace()
          throws java.io.ObjectStreamException {
        return super.writeReplace();
      }
4585    
      // Static parse entry points for every supported input form
      // (ByteString, byte[], InputStream, CodedInputStream, with or without
      // an extension registry); all delegate to PARSER.
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile parseFrom(
          com.google.protobuf.ByteString data)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile parseFrom(
          com.google.protobuf.ByteString data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile parseFrom(byte[] data)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile parseFrom(
          byte[] data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile parseFrom(java.io.InputStream input)
          throws java.io.IOException {
        return PARSER.parseFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile parseFrom(
          java.io.InputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseFrom(input, extensionRegistry);
      }
      // Delimited variants read a leading varint length before the message.
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile parseDelimitedFrom(java.io.InputStream input)
          throws java.io.IOException {
        return PARSER.parseDelimitedFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile parseDelimitedFrom(
          java.io.InputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseDelimitedFrom(input, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile parseFrom(
          com.google.protobuf.CodedInputStream input)
          throws java.io.IOException {
        return PARSER.parseFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile parseFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseFrom(input, extensionRegistry);
      }
4638    
      // Builder factories: a fresh builder, a builder pre-populated from a
      // prototype message, and toBuilder() for copying this instance.
      public static Builder newBuilder() { return Builder.create(); }
      public Builder newBuilderForType() { return newBuilder(); }
      public static Builder newBuilder(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile prototype) {
        return newBuilder().mergeFrom(prototype);
      }
      public Builder toBuilder() { return newBuilder(this); }
4645    
      // Creates a builder attached to a parent, used internally by the
      // protobuf runtime for nested-builder change notification.
      @java.lang.Override
      protected Builder newBuilderForType(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        Builder builder = new Builder(parent);
        return builder;
      }
4652          /**
4653           * Protobuf type {@code hadoop.hdfs.fsimage.INodeSection.INodeFile}
4654           */
4655          public static final class Builder extends
4656              com.google.protobuf.GeneratedMessage.Builder<Builder>
4657             implements org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFileOrBuilder {
        // Same message descriptor as the enclosing INodeFile type.
        public static final com.google.protobuf.Descriptors.Descriptor
            getDescriptor() {
          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_INodeFile_descriptor;
        }
4662    
        // Reflective field accessors shared with the message class.
        protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
            internalGetFieldAccessorTable() {
          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_INodeFile_fieldAccessorTable
              .ensureFieldAccessorsInitialized(
                  org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile.Builder.class);
        }
4669    
        // Construct using org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile.newBuilder()
        private Builder() {
          maybeForceBuilderInitialization();
        }

        // Parent-attached variant used by newBuilderForType(parent).
        private Builder(
            com.google.protobuf.GeneratedMessage.BuilderParent parent) {
          super(parent);
          maybeForceBuilderInitialization();
        }
        // Eagerly creates the nested-field builders when the runtime is
        // configured to always use field builders.
        private void maybeForceBuilderInitialization() {
          if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
            getBlocksFieldBuilder();
            getFileUCFieldBuilder();
            getAclFieldBuilder();
          }
        }
        private static Builder create() {
          return new Builder();
        }
4690    
        // Resets all fields and presence bits to defaults. Builder bit layout:
        // 0x01..0x10 scalars, 0x20 blocks, 0x40 fileUC, 0x80 acl (note: this
        // differs from the message's bit layout; see buildPartial()).
        public Builder clear() {
          super.clear();
          replication_ = 0;
          bitField0_ = (bitField0_ & ~0x00000001);
          modificationTime_ = 0L;
          bitField0_ = (bitField0_ & ~0x00000002);
          accessTime_ = 0L;
          bitField0_ = (bitField0_ & ~0x00000004);
          preferredBlockSize_ = 0L;
          bitField0_ = (bitField0_ & ~0x00000008);
          permission_ = 0L;
          bitField0_ = (bitField0_ & ~0x00000010);
          if (blocksBuilder_ == null) {
            blocks_ = java.util.Collections.emptyList();
            bitField0_ = (bitField0_ & ~0x00000020);
          } else {
            blocksBuilder_.clear();
          }
          if (fileUCBuilder_ == null) {
            fileUC_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature.getDefaultInstance();
          } else {
            fileUCBuilder_.clear();
          }
          bitField0_ = (bitField0_ & ~0x00000040);
          if (aclBuilder_ == null) {
            acl_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto.getDefaultInstance();
          } else {
            aclBuilder_.clear();
          }
          bitField0_ = (bitField0_ & ~0x00000080);
          return this;
        }
4723    
        // Deep copy via round-trip through a partially built message.
        public Builder clone() {
          return create().mergeFrom(buildPartial());
        }
4727    
        // Descriptor of the message type this builder produces.
        public com.google.protobuf.Descriptors.Descriptor
            getDescriptorForType() {
          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_INodeFile_descriptor;
        }
4732    
        // Shared immutable default instance of INodeFile.
        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile getDefaultInstanceForType() {
          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile.getDefaultInstance();
        }
4736    
        // Builds the message, throwing if any nested required field is unset
        // (unlike buildPartial(), which never throws for that).
        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile build() {
          org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile result = buildPartial();
          if (!result.isInitialized()) {
            throw newUninitializedMessageException(result);
          }
          return result;
        }
4744    
        // Copies builder state into a new message without checking
        // initialization. Builder bits are remapped to message bits here:
        // the repeated 'blocks' field consumes builder bit 0x20 but has no
        // message bit, so fileUC (0x40) and acl (0x80) shift down to the
        // message's 0x20 and 0x40.
        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile buildPartial() {
          org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile result = new org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile(this);
          int from_bitField0_ = bitField0_;
          int to_bitField0_ = 0;
          if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
            to_bitField0_ |= 0x00000001;
          }
          result.replication_ = replication_;
          if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
            to_bitField0_ |= 0x00000002;
          }
          result.modificationTime_ = modificationTime_;
          if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
            to_bitField0_ |= 0x00000004;
          }
          result.accessTime_ = accessTime_;
          if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
            to_bitField0_ |= 0x00000008;
          }
          result.preferredBlockSize_ = preferredBlockSize_;
          if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
            to_bitField0_ |= 0x00000010;
          }
          result.permission_ = permission_;
          // Repeated field: freeze the list (and drop the builder bit) so the
          // built message shares an immutable view.
          if (blocksBuilder_ == null) {
            if (((bitField0_ & 0x00000020) == 0x00000020)) {
              blocks_ = java.util.Collections.unmodifiableList(blocks_);
              bitField0_ = (bitField0_ & ~0x00000020);
            }
            result.blocks_ = blocks_;
          } else {
            result.blocks_ = blocksBuilder_.build();
          }
          if (((from_bitField0_ & 0x00000040) == 0x00000040)) {
            to_bitField0_ |= 0x00000020;
          }
          if (fileUCBuilder_ == null) {
            result.fileUC_ = fileUC_;
          } else {
            result.fileUC_ = fileUCBuilder_.build();
          }
          if (((from_bitField0_ & 0x00000080) == 0x00000080)) {
            to_bitField0_ |= 0x00000040;
          }
          if (aclBuilder_ == null) {
            result.acl_ = acl_;
          } else {
            result.acl_ = aclBuilder_.build();
          }
          result.bitField0_ = to_bitField0_;
          onBuilt();
          return result;
        }
4798    
        // Dispatches to the typed merge when possible; otherwise falls back to
        // the reflective merge in the superclass.
        public Builder mergeFrom(com.google.protobuf.Message other) {
          if (other instanceof org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile) {
            return mergeFrom((org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile)other);
          } else {
            super.mergeFrom(other);
            return this;
          }
        }
4807    
        // Typed merge: set fields of 'other' overwrite scalars, repeated
        // 'blocks' are appended, and nested messages are merged recursively.
        public Builder mergeFrom(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile other) {
          if (other == org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile.getDefaultInstance()) return this;
          if (other.hasReplication()) {
            setReplication(other.getReplication());
          }
          if (other.hasModificationTime()) {
            setModificationTime(other.getModificationTime());
          }
          if (other.hasAccessTime()) {
            setAccessTime(other.getAccessTime());
          }
          if (other.hasPreferredBlockSize()) {
            setPreferredBlockSize(other.getPreferredBlockSize());
          }
          if (other.hasPermission()) {
            setPermission(other.getPermission());
          }
          // List-backed path: adopt other's (immutable) list when ours is
          // empty, otherwise copy-on-write and append.
          if (blocksBuilder_ == null) {
            if (!other.blocks_.isEmpty()) {
              if (blocks_.isEmpty()) {
                blocks_ = other.blocks_;
                bitField0_ = (bitField0_ & ~0x00000020);
              } else {
                ensureBlocksIsMutable();
                blocks_.addAll(other.blocks_);
              }
              onChanged();
            }
          } else {
            // Builder-backed path: if our builder is empty, discard it and
            // adopt other's list directly; otherwise append messages.
            if (!other.blocks_.isEmpty()) {
              if (blocksBuilder_.isEmpty()) {
                blocksBuilder_.dispose();
                blocksBuilder_ = null;
                blocks_ = other.blocks_;
                bitField0_ = (bitField0_ & ~0x00000020);
                blocksBuilder_ = 
                  com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
                     getBlocksFieldBuilder() : null;
              } else {
                blocksBuilder_.addAllMessages(other.blocks_);
              }
            }
          }
          if (other.hasFileUC()) {
            mergeFileUC(other.getFileUC());
          }
          if (other.hasAcl()) {
            mergeAcl(other.getAcl());
          }
          this.mergeUnknownFields(other.getUnknownFields());
          return this;
        }
4860    
        // Unmemoized initialization check (builder state is mutable): only
        // nested BlockProtos can have unsatisfied required fields.
        public final boolean isInitialized() {
          for (int i = 0; i < getBlocksCount(); i++) {
            if (!getBlocks(i).isInitialized()) {
              
              return false;
            }
          }
          return true;
        }
4870    
        // Parses from the wire and merges into this builder. On parse failure
        // the partially parsed message (if any) is still merged in the finally
        // block before the exception propagates.
        public Builder mergeFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws java.io.IOException {
          org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile parsedMessage = null;
          try {
            parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            parsedMessage = (org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile) e.getUnfinishedMessage();
            throw e;
          } finally {
            if (parsedMessage != null) {
              mergeFrom(parsedMessage);
            }
          }
          return this;
        }
        // Builder presence bits (layout differs from the message class; see
        // buildPartial() for the mapping).
        private int bitField0_;

        // optional uint32 replication = 1;
        private int replication_ ;
        /**
         * <code>optional uint32 replication = 1;</code>
         */
        public boolean hasReplication() {
          return ((bitField0_ & 0x00000001) == 0x00000001);
        }
        /**
         * <code>optional uint32 replication = 1;</code>
         */
        public int getReplication() {
          return replication_;
        }
        /**
         * <code>optional uint32 replication = 1;</code>
         */
        public Builder setReplication(int value) {
          bitField0_ |= 0x00000001;
          replication_ = value;
          onChanged();
          return this;
        }
        /**
         * <code>optional uint32 replication = 1;</code>
         */
        public Builder clearReplication() {
          bitField0_ = (bitField0_ & ~0x00000001);
          replication_ = 0;
          onChanged();
          return this;
        }
4922    
        // optional uint64 modificationTime = 2;
        // Presence tracked by builder bit 0x02.
        private long modificationTime_ ;
        /**
         * <code>optional uint64 modificationTime = 2;</code>
         */
        public boolean hasModificationTime() {
          return ((bitField0_ & 0x00000002) == 0x00000002);
        }
        /**
         * <code>optional uint64 modificationTime = 2;</code>
         */
        public long getModificationTime() {
          return modificationTime_;
        }
        /**
         * <code>optional uint64 modificationTime = 2;</code>
         */
        public Builder setModificationTime(long value) {
          bitField0_ |= 0x00000002;
          modificationTime_ = value;
          onChanged();
          return this;
        }
        /**
         * <code>optional uint64 modificationTime = 2;</code>
         */
        public Builder clearModificationTime() {
          bitField0_ = (bitField0_ & ~0x00000002);
          modificationTime_ = 0L;
          onChanged();
          return this;
        }
4955    
        // optional uint64 accessTime = 3;
        // Presence tracked by builder bit 0x04.
        private long accessTime_ ;
        /**
         * <code>optional uint64 accessTime = 3;</code>
         */
        public boolean hasAccessTime() {
          return ((bitField0_ & 0x00000004) == 0x00000004);
        }
        /**
         * <code>optional uint64 accessTime = 3;</code>
         */
        public long getAccessTime() {
          return accessTime_;
        }
        /**
         * <code>optional uint64 accessTime = 3;</code>
         */
        public Builder setAccessTime(long value) {
          bitField0_ |= 0x00000004;
          accessTime_ = value;
          onChanged();
          return this;
        }
        /**
         * <code>optional uint64 accessTime = 3;</code>
         */
        public Builder clearAccessTime() {
          bitField0_ = (bitField0_ & ~0x00000004);
          accessTime_ = 0L;
          onChanged();
          return this;
        }
4988    
4989            // optional uint64 preferredBlockSize = 4;
4990            private long preferredBlockSize_ ;
4991            /**
4992             * <code>optional uint64 preferredBlockSize = 4;</code>
4993             */
4994            public boolean hasPreferredBlockSize() {
4995              return ((bitField0_ & 0x00000008) == 0x00000008);
4996            }
4997            /**
4998             * <code>optional uint64 preferredBlockSize = 4;</code>
4999             */
5000            public long getPreferredBlockSize() {
5001              return preferredBlockSize_;
5002            }
5003            /**
5004             * <code>optional uint64 preferredBlockSize = 4;</code>
5005             */
5006            public Builder setPreferredBlockSize(long value) {
5007              bitField0_ |= 0x00000008;
5008              preferredBlockSize_ = value;
5009              onChanged();
5010              return this;
5011            }
5012            /**
5013             * <code>optional uint64 preferredBlockSize = 4;</code>
5014             */
5015            public Builder clearPreferredBlockSize() {
5016              bitField0_ = (bitField0_ & ~0x00000008);
5017              preferredBlockSize_ = 0L;
5018              onChanged();
5019              return this;
5020            }
5021    
5022            // optional fixed64 permission = 5;
5023            private long permission_ ;
5024            /**
5025             * <code>optional fixed64 permission = 5;</code>
5026             */
5027            public boolean hasPermission() {
5028              return ((bitField0_ & 0x00000010) == 0x00000010);
5029            }
5030            /**
5031             * <code>optional fixed64 permission = 5;</code>
5032             */
5033            public long getPermission() {
5034              return permission_;
5035            }
5036            /**
5037             * <code>optional fixed64 permission = 5;</code>
5038             */
5039            public Builder setPermission(long value) {
5040              bitField0_ |= 0x00000010;
5041              permission_ = value;
5042              onChanged();
5043              return this;
5044            }
5045            /**
5046             * <code>optional fixed64 permission = 5;</code>
5047             */
5048            public Builder clearPermission() {
5049              bitField0_ = (bitField0_ & ~0x00000010);
5050              permission_ = 0L;
5051              onChanged();
5052              return this;
5053            }
5054    
        // repeated .hadoop.hdfs.BlockProto blocks = 6;
        // blocks_ holds the element list directly until a nested field builder
        // is requested; once blocksBuilder_ exists it owns the data instead.
        private java.util.List<org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto> blocks_ =
          java.util.Collections.emptyList();
        // Copy-on-write guard: bit 0x20 of bitField0_ records whether blocks_
        // is already a privately owned mutable ArrayList; if not, copy it
        // before the first mutation.
        private void ensureBlocksIsMutable() {
          if (!((bitField0_ & 0x00000020) == 0x00000020)) {
            blocks_ = new java.util.ArrayList<org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto>(blocks_);
            bitField0_ |= 0x00000020;
           }
        }

        private com.google.protobuf.RepeatedFieldBuilder<
            org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProtoOrBuilder> blocksBuilder_;

        /**
         * <code>repeated .hadoop.hdfs.BlockProto blocks = 6;</code>
         */
        public java.util.List<org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto> getBlocksList() {
          if (blocksBuilder_ == null) {
            // No nested builder yet: expose a read-only view of the local list.
            return java.util.Collections.unmodifiableList(blocks_);
          } else {
            return blocksBuilder_.getMessageList();
          }
        }
        /**
         * <code>repeated .hadoop.hdfs.BlockProto blocks = 6;</code>
         */
        public int getBlocksCount() {
          if (blocksBuilder_ == null) {
            return blocks_.size();
          } else {
            return blocksBuilder_.getCount();
          }
        }
        /**
         * <code>repeated .hadoop.hdfs.BlockProto blocks = 6;</code>
         */
        public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto getBlocks(int index) {
          if (blocksBuilder_ == null) {
            return blocks_.get(index);
          } else {
            return blocksBuilder_.getMessage(index);
          }
        }
5098            /**
5099             * <code>repeated .hadoop.hdfs.BlockProto blocks = 6;</code>
5100             */
5101            public Builder setBlocks(
5102                int index, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto value) {
5103              if (blocksBuilder_ == null) {
5104                if (value == null) {
5105                  throw new NullPointerException();
5106                }
5107                ensureBlocksIsMutable();
5108                blocks_.set(index, value);
5109                onChanged();
5110              } else {
5111                blocksBuilder_.setMessage(index, value);
5112              }
5113              return this;
5114            }
5115            /**
5116             * <code>repeated .hadoop.hdfs.BlockProto blocks = 6;</code>
5117             */
5118            public Builder setBlocks(
5119                int index, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto.Builder builderForValue) {
5120              if (blocksBuilder_ == null) {
5121                ensureBlocksIsMutable();
5122                blocks_.set(index, builderForValue.build());
5123                onChanged();
5124              } else {
5125                blocksBuilder_.setMessage(index, builderForValue.build());
5126              }
5127              return this;
5128            }
5129            /**
5130             * <code>repeated .hadoop.hdfs.BlockProto blocks = 6;</code>
5131             */
5132            public Builder addBlocks(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto value) {
5133              if (blocksBuilder_ == null) {
5134                if (value == null) {
5135                  throw new NullPointerException();
5136                }
5137                ensureBlocksIsMutable();
5138                blocks_.add(value);
5139                onChanged();
5140              } else {
5141                blocksBuilder_.addMessage(value);
5142              }
5143              return this;
5144            }
5145            /**
5146             * <code>repeated .hadoop.hdfs.BlockProto blocks = 6;</code>
5147             */
5148            public Builder addBlocks(
5149                int index, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto value) {
5150              if (blocksBuilder_ == null) {
5151                if (value == null) {
5152                  throw new NullPointerException();
5153                }
5154                ensureBlocksIsMutable();
5155                blocks_.add(index, value);
5156                onChanged();
5157              } else {
5158                blocksBuilder_.addMessage(index, value);
5159              }
5160              return this;
5161            }
5162            /**
5163             * <code>repeated .hadoop.hdfs.BlockProto blocks = 6;</code>
5164             */
5165            public Builder addBlocks(
5166                org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto.Builder builderForValue) {
5167              if (blocksBuilder_ == null) {
5168                ensureBlocksIsMutable();
5169                blocks_.add(builderForValue.build());
5170                onChanged();
5171              } else {
5172                blocksBuilder_.addMessage(builderForValue.build());
5173              }
5174              return this;
5175            }
5176            /**
5177             * <code>repeated .hadoop.hdfs.BlockProto blocks = 6;</code>
5178             */
5179            public Builder addBlocks(
5180                int index, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto.Builder builderForValue) {
5181              if (blocksBuilder_ == null) {
5182                ensureBlocksIsMutable();
5183                blocks_.add(index, builderForValue.build());
5184                onChanged();
5185              } else {
5186                blocksBuilder_.addMessage(index, builderForValue.build());
5187              }
5188              return this;
5189            }
5190            /**
5191             * <code>repeated .hadoop.hdfs.BlockProto blocks = 6;</code>
5192             */
5193            public Builder addAllBlocks(
5194                java.lang.Iterable<? extends org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto> values) {
5195              if (blocksBuilder_ == null) {
5196                ensureBlocksIsMutable();
5197                super.addAll(values, blocks_);
5198                onChanged();
5199              } else {
5200                blocksBuilder_.addAllMessages(values);
5201              }
5202              return this;
5203            }
5204            /**
5205             * <code>repeated .hadoop.hdfs.BlockProto blocks = 6;</code>
5206             */
5207            public Builder clearBlocks() {
5208              if (blocksBuilder_ == null) {
5209                blocks_ = java.util.Collections.emptyList();
5210                bitField0_ = (bitField0_ & ~0x00000020);
5211                onChanged();
5212              } else {
5213                blocksBuilder_.clear();
5214              }
5215              return this;
5216            }
5217            /**
5218             * <code>repeated .hadoop.hdfs.BlockProto blocks = 6;</code>
5219             */
5220            public Builder removeBlocks(int index) {
5221              if (blocksBuilder_ == null) {
5222                ensureBlocksIsMutable();
5223                blocks_.remove(index);
5224                onChanged();
5225              } else {
5226                blocksBuilder_.remove(index);
5227              }
5228              return this;
5229            }
5230            /**
5231             * <code>repeated .hadoop.hdfs.BlockProto blocks = 6;</code>
5232             */
5233            public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto.Builder getBlocksBuilder(
5234                int index) {
5235              return getBlocksFieldBuilder().getBuilder(index);
5236            }
5237            /**
5238             * <code>repeated .hadoop.hdfs.BlockProto blocks = 6;</code>
5239             */
5240            public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProtoOrBuilder getBlocksOrBuilder(
5241                int index) {
5242              if (blocksBuilder_ == null) {
5243                return blocks_.get(index);  } else {
5244                return blocksBuilder_.getMessageOrBuilder(index);
5245              }
5246            }
5247            /**
5248             * <code>repeated .hadoop.hdfs.BlockProto blocks = 6;</code>
5249             */
5250            public java.util.List<? extends org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProtoOrBuilder> 
5251                 getBlocksOrBuilderList() {
5252              if (blocksBuilder_ != null) {
5253                return blocksBuilder_.getMessageOrBuilderList();
5254              } else {
5255                return java.util.Collections.unmodifiableList(blocks_);
5256              }
5257            }
5258            /**
5259             * <code>repeated .hadoop.hdfs.BlockProto blocks = 6;</code>
5260             */
5261            public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto.Builder addBlocksBuilder() {
5262              return getBlocksFieldBuilder().addBuilder(
5263                  org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto.getDefaultInstance());
5264            }
5265            /**
5266             * <code>repeated .hadoop.hdfs.BlockProto blocks = 6;</code>
5267             */
5268            public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto.Builder addBlocksBuilder(
5269                int index) {
5270              return getBlocksFieldBuilder().addBuilder(
5271                  index, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto.getDefaultInstance());
5272            }
5273            /**
5274             * <code>repeated .hadoop.hdfs.BlockProto blocks = 6;</code>
5275             */
5276            public java.util.List<org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto.Builder> 
5277                 getBlocksBuilderList() {
5278              return getBlocksFieldBuilder().getBuilderList();
5279            }
5280            private com.google.protobuf.RepeatedFieldBuilder<
5281                org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProtoOrBuilder> 
5282                getBlocksFieldBuilder() {
5283              if (blocksBuilder_ == null) {
5284                blocksBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
5285                    org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProtoOrBuilder>(
5286                        blocks_,
5287                        ((bitField0_ & 0x00000020) == 0x00000020),
5288                        getParentForChildren(),
5289                        isClean());
5290                blocks_ = null;
5291              }
5292              return blocksBuilder_;
5293            }
5294    
        // optional .hadoop.hdfs.fsimage.INodeSection.FileUnderConstructionFeature fileUC = 7;
        // fileUC_ holds the message directly until a nested builder is
        // requested; afterwards fileUCBuilder_ owns the value.
        private org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature fileUC_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature.getDefaultInstance();
        private com.google.protobuf.SingleFieldBuilder<
            org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeatureOrBuilder> fileUCBuilder_;
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.FileUnderConstructionFeature fileUC = 7;</code>
         */
        public boolean hasFileUC() {
          return ((bitField0_ & 0x00000040) == 0x00000040);
        }
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.FileUnderConstructionFeature fileUC = 7;</code>
         */
        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature getFileUC() {
          if (fileUCBuilder_ == null) {
            return fileUC_;
          } else {
            return fileUCBuilder_.getMessage();
          }
        }
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.FileUnderConstructionFeature fileUC = 7;</code>
         */
        public Builder setFileUC(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature value) {
          if (fileUCBuilder_ == null) {
            if (value == null) {
              throw new NullPointerException();
            }
            fileUC_ = value;
            onChanged();
          } else {
            fileUCBuilder_.setMessage(value);
          }
          bitField0_ |= 0x00000040;
          return this;
        }
5331            /**
5332             * <code>optional .hadoop.hdfs.fsimage.INodeSection.FileUnderConstructionFeature fileUC = 7;</code>
5333             */
5334            public Builder setFileUC(
5335                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature.Builder builderForValue) {
5336              if (fileUCBuilder_ == null) {
5337                fileUC_ = builderForValue.build();
5338                onChanged();
5339              } else {
5340                fileUCBuilder_.setMessage(builderForValue.build());
5341              }
5342              bitField0_ |= 0x00000040;
5343              return this;
5344            }
5345            /**
5346             * <code>optional .hadoop.hdfs.fsimage.INodeSection.FileUnderConstructionFeature fileUC = 7;</code>
5347             */
5348            public Builder mergeFileUC(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature value) {
5349              if (fileUCBuilder_ == null) {
5350                if (((bitField0_ & 0x00000040) == 0x00000040) &&
5351                    fileUC_ != org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature.getDefaultInstance()) {
5352                  fileUC_ =
5353                    org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature.newBuilder(fileUC_).mergeFrom(value).buildPartial();
5354                } else {
5355                  fileUC_ = value;
5356                }
5357                onChanged();
5358              } else {
5359                fileUCBuilder_.mergeFrom(value);
5360              }
5361              bitField0_ |= 0x00000040;
5362              return this;
5363            }
5364            /**
5365             * <code>optional .hadoop.hdfs.fsimage.INodeSection.FileUnderConstructionFeature fileUC = 7;</code>
5366             */
5367            public Builder clearFileUC() {
5368              if (fileUCBuilder_ == null) {
5369                fileUC_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature.getDefaultInstance();
5370                onChanged();
5371              } else {
5372                fileUCBuilder_.clear();
5373              }
5374              bitField0_ = (bitField0_ & ~0x00000040);
5375              return this;
5376            }
5377            /**
5378             * <code>optional .hadoop.hdfs.fsimage.INodeSection.FileUnderConstructionFeature fileUC = 7;</code>
5379             */
5380            public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature.Builder getFileUCBuilder() {
5381              bitField0_ |= 0x00000040;
5382              onChanged();
5383              return getFileUCFieldBuilder().getBuilder();
5384            }
5385            /**
5386             * <code>optional .hadoop.hdfs.fsimage.INodeSection.FileUnderConstructionFeature fileUC = 7;</code>
5387             */
5388            public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeatureOrBuilder getFileUCOrBuilder() {
5389              if (fileUCBuilder_ != null) {
5390                return fileUCBuilder_.getMessageOrBuilder();
5391              } else {
5392                return fileUC_;
5393              }
5394            }
5395            /**
5396             * <code>optional .hadoop.hdfs.fsimage.INodeSection.FileUnderConstructionFeature fileUC = 7;</code>
5397             */
5398            private com.google.protobuf.SingleFieldBuilder<
5399                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeatureOrBuilder> 
5400                getFileUCFieldBuilder() {
5401              if (fileUCBuilder_ == null) {
5402                fileUCBuilder_ = new com.google.protobuf.SingleFieldBuilder<
5403                    org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeatureOrBuilder>(
5404                        fileUC_,
5405                        getParentForChildren(),
5406                        isClean());
5407                fileUC_ = null;
5408              }
5409              return fileUCBuilder_;
5410            }
5411    
        // optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 8;
        // acl_ holds the message directly until a nested builder is requested;
        // afterwards aclBuilder_ owns the value.
        private org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto acl_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto.getDefaultInstance();
        private com.google.protobuf.SingleFieldBuilder<
            org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProtoOrBuilder> aclBuilder_;
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 8;</code>
         */
        public boolean hasAcl() {
          return ((bitField0_ & 0x00000080) == 0x00000080);
        }
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 8;</code>
         */
        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto getAcl() {
          if (aclBuilder_ == null) {
            return acl_;
          } else {
            return aclBuilder_.getMessage();
          }
        }
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 8;</code>
         */
        public Builder setAcl(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto value) {
          if (aclBuilder_ == null) {
            if (value == null) {
              throw new NullPointerException();
            }
            acl_ = value;
            onChanged();
          } else {
            aclBuilder_.setMessage(value);
          }
          bitField0_ |= 0x00000080;
          return this;
        }
5448            /**
5449             * <code>optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 8;</code>
5450             */
5451            public Builder setAcl(
5452                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto.Builder builderForValue) {
5453              if (aclBuilder_ == null) {
5454                acl_ = builderForValue.build();
5455                onChanged();
5456              } else {
5457                aclBuilder_.setMessage(builderForValue.build());
5458              }
5459              bitField0_ |= 0x00000080;
5460              return this;
5461            }
5462            /**
5463             * <code>optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 8;</code>
5464             */
5465            public Builder mergeAcl(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto value) {
5466              if (aclBuilder_ == null) {
5467                if (((bitField0_ & 0x00000080) == 0x00000080) &&
5468                    acl_ != org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto.getDefaultInstance()) {
5469                  acl_ =
5470                    org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto.newBuilder(acl_).mergeFrom(value).buildPartial();
5471                } else {
5472                  acl_ = value;
5473                }
5474                onChanged();
5475              } else {
5476                aclBuilder_.mergeFrom(value);
5477              }
5478              bitField0_ |= 0x00000080;
5479              return this;
5480            }
5481            /**
5482             * <code>optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 8;</code>
5483             */
5484            public Builder clearAcl() {
5485              if (aclBuilder_ == null) {
5486                acl_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto.getDefaultInstance();
5487                onChanged();
5488              } else {
5489                aclBuilder_.clear();
5490              }
5491              bitField0_ = (bitField0_ & ~0x00000080);
5492              return this;
5493            }
5494            /**
5495             * <code>optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 8;</code>
5496             */
5497            public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto.Builder getAclBuilder() {
5498              bitField0_ |= 0x00000080;
5499              onChanged();
5500              return getAclFieldBuilder().getBuilder();
5501            }
5502            /**
5503             * <code>optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 8;</code>
5504             */
5505            public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProtoOrBuilder getAclOrBuilder() {
5506              if (aclBuilder_ != null) {
5507                return aclBuilder_.getMessageOrBuilder();
5508              } else {
5509                return acl_;
5510              }
5511            }
5512            /**
5513             * <code>optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 8;</code>
5514             */
5515            private com.google.protobuf.SingleFieldBuilder<
5516                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProtoOrBuilder> 
5517                getAclFieldBuilder() {
5518              if (aclBuilder_ == null) {
5519                aclBuilder_ = new com.google.protobuf.SingleFieldBuilder<
5520                    org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProtoOrBuilder>(
5521                        acl_,
5522                        getParentForChildren(),
5523                        isClean());
5524                acl_ = null;
5525              }
5526              return aclBuilder_;
5527            }
5528    
5529            // @@protoc_insertion_point(builder_scope:hadoop.hdfs.fsimage.INodeSection.INodeFile)
5530          }
5531    
      // Eagerly constructs and initializes the shared singleton instance of
      // INodeFile using the no-init constructor.
      static {
        defaultInstance = new INodeFile(true);
        defaultInstance.initFields();
      }
5536    
5537          // @@protoc_insertion_point(class_scope:hadoop.hdfs.fsimage.INodeSection.INodeFile)
5538        }
5539    
    // Read-only accessor contract shared by INodeDirectory and its Builder:
    // a has/get pair per scalar field, plus OrBuilder access for the acl
    // message field.
    public interface INodeDirectoryOrBuilder
        extends com.google.protobuf.MessageOrBuilder {

      // optional uint64 modificationTime = 1;
      /**
       * <code>optional uint64 modificationTime = 1;</code>
       */
      boolean hasModificationTime();
      /**
       * <code>optional uint64 modificationTime = 1;</code>
       */
      long getModificationTime();

      // optional uint64 nsQuota = 2;
      /**
       * <code>optional uint64 nsQuota = 2;</code>
       *
       * <pre>
       * namespace quota
       * </pre>
       */
      boolean hasNsQuota();
      /**
       * <code>optional uint64 nsQuota = 2;</code>
       *
       * <pre>
       * namespace quota
       * </pre>
       */
      long getNsQuota();

      // optional uint64 dsQuota = 3;
      /**
       * <code>optional uint64 dsQuota = 3;</code>
       *
       * <pre>
       * diskspace quota
       * </pre>
       */
      boolean hasDsQuota();
      /**
       * <code>optional uint64 dsQuota = 3;</code>
       *
       * <pre>
       * diskspace quota
       * </pre>
       */
      long getDsQuota();

      // optional fixed64 permission = 4;
      /**
       * <code>optional fixed64 permission = 4;</code>
       */
      boolean hasPermission();
      /**
       * <code>optional fixed64 permission = 4;</code>
       */
      long getPermission();

      // optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 5;
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 5;</code>
       */
      boolean hasAcl();
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 5;</code>
       */
      org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto getAcl();
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 5;</code>
       */
      org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProtoOrBuilder getAclOrBuilder();
    }
5613        /**
5614         * Protobuf type {@code hadoop.hdfs.fsimage.INodeSection.INodeDirectory}
5615         */
5616        public static final class INodeDirectory extends
5617            com.google.protobuf.GeneratedMessage
5618            implements INodeDirectoryOrBuilder {
      // Use INodeDirectory.newBuilder() to construct.
      private INodeDirectory(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
        super(builder);
        // Carry over any fields the builder accumulated that have no known tag.
        this.unknownFields = builder.getUnknownFields();
      }
      // Constructor used only for the shared default instance; installs an
      // empty unknown-field set and performs no other initialization here.
      private INodeDirectory(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

      private static final INodeDirectory defaultInstance;
      public static INodeDirectory getDefaultInstance() {
        return defaultInstance;
      }

      public INodeDirectory getDefaultInstanceForType() {
        return defaultInstance;
      }
5634    
      // Fields captured during parsing whose tag numbers were not recognized
      // (see parseUnknownField in the parsing constructor below).
      private final com.google.protobuf.UnknownFieldSet unknownFields;
      @java.lang.Override
      public final com.google.protobuf.UnknownFieldSet
          getUnknownFields() {
        return this.unknownFields;
      }
      /**
       * Parsing constructor: decodes one INodeDirectory directly from a
       * CodedInputStream. Invoked via {@code PARSER.parsePartialFrom()}.
       *
       * <p>NOTE(review): the {@code default} label appears before the field
       * cases; Java permits this ordering and protoc emits it this way — it
       * does not change dispatch behavior.
       */
      private INodeDirectory(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        initFields();
        int mutable_bitField0_ = 0;
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
            com.google.protobuf.UnknownFieldSet.newBuilder();
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0: // tag 0 marks end of the message
                done = true;
                break;
              default: {
                // Unrecognized tags are preserved in unknownFields; a false
                // return signals end-of-group, which also terminates parsing.
                if (!parseUnknownField(input, unknownFields,
                                       extensionRegistry, tag)) {
                  done = true;
                }
                break;
              }
              case 8: { // field 1, varint: modificationTime
                bitField0_ |= 0x00000001;
                modificationTime_ = input.readUInt64();
                break;
              }
              case 16: { // field 2, varint: nsQuota
                bitField0_ |= 0x00000002;
                nsQuota_ = input.readUInt64();
                break;
              }
              case 24: { // field 3, varint: dsQuota
                bitField0_ |= 0x00000004;
                dsQuota_ = input.readUInt64();
                break;
              }
              case 33: { // field 4, fixed64: permission
                bitField0_ |= 0x00000008;
                permission_ = input.readFixed64();
                break;
              }
              case 42: { // field 5, length-delimited: acl sub-message
                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto.Builder subBuilder = null;
                // If acl was already seen, proto semantics require merging the
                // new occurrence into the existing one rather than replacing it.
                if (((bitField0_ & 0x00000010) == 0x00000010)) {
                  subBuilder = acl_.toBuilder();
                }
                acl_ = input.readMessage(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto.PARSER, extensionRegistry);
                if (subBuilder != null) {
                  subBuilder.mergeFrom(acl_);
                  acl_ = subBuilder.buildPartial();
                }
                // Presence bit is set only after the merge completes.
                bitField0_ |= 0x00000010;
                break;
              }
            }
          }
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          // Attach the partially-parsed message so callers can inspect it.
          throw e.setUnfinishedMessage(this);
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(
              e.getMessage()).setUnfinishedMessage(this);
        } finally {
          // Runs on both success and failure: freeze whatever was parsed.
          this.unknownFields = unknownFields.build();
          makeExtensionsImmutable();
        }
      }
      /** Descriptor for this message type, from the generated file descriptor. */
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_INodeDirectory_descriptor;
      }

      // Wires reflective field access (used by the generic Message API) to the
      // message and builder classes.
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_INodeDirectory_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory.Builder.class);
      }
5720    
5721          public static com.google.protobuf.Parser<INodeDirectory> PARSER =
5722              new com.google.protobuf.AbstractParser<INodeDirectory>() {
5723            public INodeDirectory parsePartialFrom(
5724                com.google.protobuf.CodedInputStream input,
5725                com.google.protobuf.ExtensionRegistryLite extensionRegistry)
5726                throws com.google.protobuf.InvalidProtocolBufferException {
5727              return new INodeDirectory(input, extensionRegistry);
5728            }
5729          };
5730    
5731          @java.lang.Override
5732          public com.google.protobuf.Parser<INodeDirectory> getParserForType() {
5733            return PARSER;
5734          }
5735    
5736          private int bitField0_;
5737          // optional uint64 modificationTime = 1;
5738          public static final int MODIFICATIONTIME_FIELD_NUMBER = 1;
5739          private long modificationTime_;
5740          /**
5741           * <code>optional uint64 modificationTime = 1;</code>
5742           */
5743          public boolean hasModificationTime() {
5744            return ((bitField0_ & 0x00000001) == 0x00000001);
5745          }
5746          /**
5747           * <code>optional uint64 modificationTime = 1;</code>
5748           */
5749          public long getModificationTime() {
5750            return modificationTime_;
5751          }
5752    
5753          // optional uint64 nsQuota = 2;
5754          public static final int NSQUOTA_FIELD_NUMBER = 2;
5755          private long nsQuota_;
5756          /**
5757           * <code>optional uint64 nsQuota = 2;</code>
5758           *
5759           * <pre>
5760           * namespace quota
5761           * </pre>
5762           */
5763          public boolean hasNsQuota() {
5764            return ((bitField0_ & 0x00000002) == 0x00000002);
5765          }
5766          /**
5767           * <code>optional uint64 nsQuota = 2;</code>
5768           *
5769           * <pre>
5770           * namespace quota
5771           * </pre>
5772           */
5773          public long getNsQuota() {
5774            return nsQuota_;
5775          }
5776    
5777          // optional uint64 dsQuota = 3;
5778          public static final int DSQUOTA_FIELD_NUMBER = 3;
5779          private long dsQuota_;
5780          /**
5781           * <code>optional uint64 dsQuota = 3;</code>
5782           *
5783           * <pre>
5784           * diskspace quota
5785           * </pre>
5786           */
5787          public boolean hasDsQuota() {
5788            return ((bitField0_ & 0x00000004) == 0x00000004);
5789          }
5790          /**
5791           * <code>optional uint64 dsQuota = 3;</code>
5792           *
5793           * <pre>
5794           * diskspace quota
5795           * </pre>
5796           */
5797          public long getDsQuota() {
5798            return dsQuota_;
5799          }
5800    
5801          // optional fixed64 permission = 4;
5802          public static final int PERMISSION_FIELD_NUMBER = 4;
5803          private long permission_;
5804          /**
5805           * <code>optional fixed64 permission = 4;</code>
5806           */
5807          public boolean hasPermission() {
5808            return ((bitField0_ & 0x00000008) == 0x00000008);
5809          }
5810          /**
5811           * <code>optional fixed64 permission = 4;</code>
5812           */
5813          public long getPermission() {
5814            return permission_;
5815          }
5816    
5817          // optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 5;
5818          public static final int ACL_FIELD_NUMBER = 5;
5819          private org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto acl_;
5820          /**
5821           * <code>optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 5;</code>
5822           */
5823          public boolean hasAcl() {
5824            return ((bitField0_ & 0x00000010) == 0x00000010);
5825          }
5826          /**
5827           * <code>optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 5;</code>
5828           */
5829          public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto getAcl() {
5830            return acl_;
5831          }
5832          /**
5833           * <code>optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 5;</code>
5834           */
5835          public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProtoOrBuilder getAclOrBuilder() {
5836            return acl_;
5837          }
5838    
      /** Resets every field to its proto default (used by the parsing ctor). */
      private void initFields() {
        modificationTime_ = 0L;
        nsQuota_ = 0L;
        dsQuota_ = 0L;
        permission_ = 0L;
        acl_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto.getDefaultInstance();
      }
      // Memoized isInitialized() result: -1 = not computed, 0 = false, 1 = true.
      private byte memoizedIsInitialized = -1;
      /** Always true: every field of this message is optional. */
      public final boolean isInitialized() {
        byte isInitialized = memoizedIsInitialized;
        if (isInitialized != -1) return isInitialized == 1;

        memoizedIsInitialized = 1;
        return true;
      }
5854    
5855          public void writeTo(com.google.protobuf.CodedOutputStream output)
5856                              throws java.io.IOException {
5857            getSerializedSize();
5858            if (((bitField0_ & 0x00000001) == 0x00000001)) {
5859              output.writeUInt64(1, modificationTime_);
5860            }
5861            if (((bitField0_ & 0x00000002) == 0x00000002)) {
5862              output.writeUInt64(2, nsQuota_);
5863            }
5864            if (((bitField0_ & 0x00000004) == 0x00000004)) {
5865              output.writeUInt64(3, dsQuota_);
5866            }
5867            if (((bitField0_ & 0x00000008) == 0x00000008)) {
5868              output.writeFixed64(4, permission_);
5869            }
5870            if (((bitField0_ & 0x00000010) == 0x00000010)) {
5871              output.writeMessage(5, acl_);
5872            }
5873            getUnknownFields().writeTo(output);
5874          }
5875    
5876          private int memoizedSerializedSize = -1;
5877          public int getSerializedSize() {
5878            int size = memoizedSerializedSize;
5879            if (size != -1) return size;
5880    
5881            size = 0;
5882            if (((bitField0_ & 0x00000001) == 0x00000001)) {
5883              size += com.google.protobuf.CodedOutputStream
5884                .computeUInt64Size(1, modificationTime_);
5885            }
5886            if (((bitField0_ & 0x00000002) == 0x00000002)) {
5887              size += com.google.protobuf.CodedOutputStream
5888                .computeUInt64Size(2, nsQuota_);
5889            }
5890            if (((bitField0_ & 0x00000004) == 0x00000004)) {
5891              size += com.google.protobuf.CodedOutputStream
5892                .computeUInt64Size(3, dsQuota_);
5893            }
5894            if (((bitField0_ & 0x00000008) == 0x00000008)) {
5895              size += com.google.protobuf.CodedOutputStream
5896                .computeFixed64Size(4, permission_);
5897            }
5898            if (((bitField0_ & 0x00000010) == 0x00000010)) {
5899              size += com.google.protobuf.CodedOutputStream
5900                .computeMessageSize(5, acl_);
5901            }
5902            size += getUnknownFields().getSerializedSize();
5903            memoizedSerializedSize = size;
5904            return size;
5905          }
5906    
      private static final long serialVersionUID = 0L;
      // Java serialization hook: delegates to GeneratedMessage's serialized
      // proxy so the proto wire format, not field reflection, is used.
      @java.lang.Override
      protected java.lang.Object writeReplace()
          throws java.io.ObjectStreamException {
        return super.writeReplace();
      }
5913    
      // --- Static parse entry points; every overload delegates to PARSER. ---
      // Byte-based overloads throw InvalidProtocolBufferException on malformed
      // input; stream-based overloads additionally surface IOExceptions.
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory parseFrom(
          com.google.protobuf.ByteString data)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory parseFrom(
          com.google.protobuf.ByteString data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory parseFrom(byte[] data)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory parseFrom(
          byte[] data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory parseFrom(java.io.InputStream input)
          throws java.io.IOException {
        return PARSER.parseFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory parseFrom(
          java.io.InputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseFrom(input, extensionRegistry);
      }
      // Delimited variants read a varint length prefix before the message,
      // allowing several messages to share one stream.
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory parseDelimitedFrom(java.io.InputStream input)
          throws java.io.IOException {
        return PARSER.parseDelimitedFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory parseDelimitedFrom(
          java.io.InputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseDelimitedFrom(input, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory parseFrom(
          com.google.protobuf.CodedInputStream input)
          throws java.io.IOException {
        return PARSER.parseFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory parseFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseFrom(input, extensionRegistry);
      }
5966    
      /** Creates an empty builder for this message type. */
      public static Builder newBuilder() { return Builder.create(); }
      public Builder newBuilderForType() { return newBuilder(); }
      /** Creates a builder pre-populated with {@code prototype}'s set fields. */
      public static Builder newBuilder(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory prototype) {
        return newBuilder().mergeFrom(prototype);
      }
      /** Builder initialized from this message. */
      public Builder toBuilder() { return newBuilder(this); }

      // Creates a builder attached to a parent (used for nested-builder
      // change notification inside the protobuf runtime).
      @java.lang.Override
      protected Builder newBuilderForType(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        Builder builder = new Builder(parent);
        return builder;
      }
5980          /**
5981           * Protobuf type {@code hadoop.hdfs.fsimage.INodeSection.INodeDirectory}
5982           */
5983          public static final class Builder extends
5984              com.google.protobuf.GeneratedMessage.Builder<Builder>
5985             implements org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectoryOrBuilder {
5986            public static final com.google.protobuf.Descriptors.Descriptor
5987                getDescriptor() {
5988              return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_INodeDirectory_descriptor;
5989            }
5990    
5991            protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
5992                internalGetFieldAccessorTable() {
5993              return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_INodeDirectory_fieldAccessorTable
5994                  .ensureFieldAccessorsInitialized(
5995                      org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory.Builder.class);
5996            }
5997    
        // Construct using org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory.newBuilder()
        private Builder() {
          maybeForceBuilderInitialization();
        }

        // Parent-attached variant; parent is notified of changes (used for
        // nested builders inside the protobuf runtime).
        private Builder(
            com.google.protobuf.GeneratedMessage.BuilderParent parent) {
          super(parent);
          maybeForceBuilderInitialization();
        }
        // Eagerly creates nested-message field builders when the runtime is
        // configured to always use field builders (descriptor-based mode).
        private void maybeForceBuilderInitialization() {
          if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
            getAclFieldBuilder();
          }
        }
        /** Factory used by {@code INodeDirectory.newBuilder()}. */
        private static Builder create() {
          return new Builder();
        }
6016    
        /** Resets every field to its default and clears all presence bits. */
        public Builder clear() {
          super.clear();
          modificationTime_ = 0L;
          bitField0_ = (bitField0_ & ~0x00000001);
          nsQuota_ = 0L;
          bitField0_ = (bitField0_ & ~0x00000002);
          dsQuota_ = 0L;
          bitField0_ = (bitField0_ & ~0x00000004);
          permission_ = 0L;
          bitField0_ = (bitField0_ & ~0x00000008);
          // The acl field may live either in acl_ directly or inside a
          // nested field builder; reset whichever representation is active.
          if (aclBuilder_ == null) {
            acl_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto.getDefaultInstance();
          } else {
            aclBuilder_.clear();
          }
          bitField0_ = (bitField0_ & ~0x00000010);
          return this;
        }

        /** Deep copy: a fresh builder seeded from this builder's current state. */
        public Builder clone() {
          return create().mergeFrom(buildPartial());
        }

        public com.google.protobuf.Descriptors.Descriptor
            getDescriptorForType() {
          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_INodeDirectory_descriptor;
        }

        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory getDefaultInstanceForType() {
          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory.getDefaultInstance();
        }
6048    
        /**
         * Builds the message, verifying required fields are set (this message
         * has none, so isInitialized() is always true).
         */
        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory build() {
          org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory result = buildPartial();
          if (!result.isInitialized()) {
            throw newUninitializedMessageException(result);
          }
          return result;
        }

        /**
         * Builds without the initialization check, copying each field value
         * and translating builder presence bits into message presence bits.
         */
        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory buildPartial() {
          org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory result = new org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory(this);
          int from_bitField0_ = bitField0_;
          int to_bitField0_ = 0;
          if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
            to_bitField0_ |= 0x00000001;
          }
          result.modificationTime_ = modificationTime_;
          if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
            to_bitField0_ |= 0x00000002;
          }
          result.nsQuota_ = nsQuota_;
          if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
            to_bitField0_ |= 0x00000004;
          }
          result.dsQuota_ = dsQuota_;
          if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
            to_bitField0_ |= 0x00000008;
          }
          result.permission_ = permission_;
          if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
            to_bitField0_ |= 0x00000010;
          }
          // acl comes from the nested builder when one exists, else from acl_.
          if (aclBuilder_ == null) {
            result.acl_ = acl_;
          } else {
            result.acl_ = aclBuilder_.build();
          }
          result.bitField0_ = to_bitField0_;
          onBuilt();
          return result;
        }
6089    
        /** Generic merge: dispatches to the typed overload when possible. */
        public Builder mergeFrom(com.google.protobuf.Message other) {
          if (other instanceof org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory) {
            return mergeFrom((org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory)other);
          } else {
            // Reflective field-by-field merge for foreign message types.
            super.mergeFrom(other);
            return this;
          }
        }

        /**
         * Merges {@code other} into this builder: only fields set on
         * {@code other} are copied; scalar fields overwrite, the acl
         * sub-message is merged recursively via mergeAcl().
         */
        public Builder mergeFrom(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory other) {
          if (other == org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory.getDefaultInstance()) return this;
          if (other.hasModificationTime()) {
            setModificationTime(other.getModificationTime());
          }
          if (other.hasNsQuota()) {
            setNsQuota(other.getNsQuota());
          }
          if (other.hasDsQuota()) {
            setDsQuota(other.getDsQuota());
          }
          if (other.hasPermission()) {
            setPermission(other.getPermission());
          }
          if (other.hasAcl()) {
            mergeAcl(other.getAcl());
          }
          this.mergeUnknownFields(other.getUnknownFields());
          return this;
        }

        /** Always true: this message has no required fields. */
        public final boolean isInitialized() {
          return true;
        }
6123    
        /**
         * Parses from a stream and merges the result into this builder.
         * On a parse failure, the partially-parsed message attached to the
         * exception is still merged (in the finally block) before the
         * exception propagates, so successfully-read fields are not lost.
         */
        public Builder mergeFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws java.io.IOException {
          org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory parsedMessage = null;
          try {
            parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            parsedMessage = (org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory) e.getUnfinishedMessage();
            throw e;
          } finally {
            if (parsedMessage != null) {
              mergeFrom(parsedMessage);
            }
          }
          return this;
        }
6141            private int bitField0_;
6142    
6143            // optional uint64 modificationTime = 1;
6144            private long modificationTime_ ;
6145            /**
6146             * <code>optional uint64 modificationTime = 1;</code>
6147             */
6148            public boolean hasModificationTime() {
6149              return ((bitField0_ & 0x00000001) == 0x00000001);
6150            }
6151            /**
6152             * <code>optional uint64 modificationTime = 1;</code>
6153             */
6154            public long getModificationTime() {
6155              return modificationTime_;
6156            }
6157            /**
6158             * <code>optional uint64 modificationTime = 1;</code>
6159             */
6160            public Builder setModificationTime(long value) {
6161              bitField0_ |= 0x00000001;
6162              modificationTime_ = value;
6163              onChanged();
6164              return this;
6165            }
6166            /**
6167             * <code>optional uint64 modificationTime = 1;</code>
6168             */
6169            public Builder clearModificationTime() {
6170              bitField0_ = (bitField0_ & ~0x00000001);
6171              modificationTime_ = 0L;
6172              onChanged();
6173              return this;
6174            }
6175    
6176            // optional uint64 nsQuota = 2;
6177            private long nsQuota_ ;
6178            /**
6179             * <code>optional uint64 nsQuota = 2;</code>
6180             *
6181             * <pre>
6182             * namespace quota
6183             * </pre>
6184             */
6185            public boolean hasNsQuota() {
6186              return ((bitField0_ & 0x00000002) == 0x00000002);
6187            }
6188            /**
6189             * <code>optional uint64 nsQuota = 2;</code>
6190             *
6191             * <pre>
6192             * namespace quota
6193             * </pre>
6194             */
6195            public long getNsQuota() {
6196              return nsQuota_;
6197            }
6198            /**
6199             * <code>optional uint64 nsQuota = 2;</code>
6200             *
6201             * <pre>
6202             * namespace quota
6203             * </pre>
6204             */
6205            public Builder setNsQuota(long value) {
6206              bitField0_ |= 0x00000002;
6207              nsQuota_ = value;
6208              onChanged();
6209              return this;
6210            }
6211            /**
6212             * <code>optional uint64 nsQuota = 2;</code>
6213             *
6214             * <pre>
6215             * namespace quota
6216             * </pre>
6217             */
6218            public Builder clearNsQuota() {
6219              bitField0_ = (bitField0_ & ~0x00000002);
6220              nsQuota_ = 0L;
6221              onChanged();
6222              return this;
6223            }
6224    
6225            // optional uint64 dsQuota = 3;
6226            private long dsQuota_ ;
6227            /**
6228             * <code>optional uint64 dsQuota = 3;</code>
6229             *
6230             * <pre>
6231             * diskspace quota
6232             * </pre>
6233             */
6234            public boolean hasDsQuota() {
6235              return ((bitField0_ & 0x00000004) == 0x00000004);
6236            }
6237            /**
6238             * <code>optional uint64 dsQuota = 3;</code>
6239             *
6240             * <pre>
6241             * diskspace quota
6242             * </pre>
6243             */
6244            public long getDsQuota() {
6245              return dsQuota_;
6246            }
6247            /**
6248             * <code>optional uint64 dsQuota = 3;</code>
6249             *
6250             * <pre>
6251             * diskspace quota
6252             * </pre>
6253             */
6254            public Builder setDsQuota(long value) {
6255              bitField0_ |= 0x00000004;
6256              dsQuota_ = value;
6257              onChanged();
6258              return this;
6259            }
6260            /**
6261             * <code>optional uint64 dsQuota = 3;</code>
6262             *
6263             * <pre>
6264             * diskspace quota
6265             * </pre>
6266             */
6267            public Builder clearDsQuota() {
6268              bitField0_ = (bitField0_ & ~0x00000004);
6269              dsQuota_ = 0L;
6270              onChanged();
6271              return this;
6272            }
6273    
6274            // optional fixed64 permission = 4;
6275            private long permission_ ;
6276            /**
6277             * <code>optional fixed64 permission = 4;</code>
6278             */
6279            public boolean hasPermission() {
6280              return ((bitField0_ & 0x00000008) == 0x00000008);
6281            }
6282            /**
6283             * <code>optional fixed64 permission = 4;</code>
6284             */
6285            public long getPermission() {
6286              return permission_;
6287            }
6288            /**
6289             * <code>optional fixed64 permission = 4;</code>
6290             */
6291            public Builder setPermission(long value) {
6292              bitField0_ |= 0x00000008;
6293              permission_ = value;
6294              onChanged();
6295              return this;
6296            }
6297            /**
6298             * <code>optional fixed64 permission = 4;</code>
6299             */
6300            public Builder clearPermission() {
6301              bitField0_ = (bitField0_ & ~0x00000008);
6302              permission_ = 0L;
6303              onChanged();
6304              return this;
6305            }
6306    
6307            // optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 5;
6308            private org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto acl_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto.getDefaultInstance();
6309            private com.google.protobuf.SingleFieldBuilder<
6310                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProtoOrBuilder> aclBuilder_;
6311            /**
6312             * <code>optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 5;</code>
6313             */
6314            public boolean hasAcl() {
6315              return ((bitField0_ & 0x00000010) == 0x00000010);
6316            }
6317            /**
6318             * <code>optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 5;</code>
6319             */
6320            public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto getAcl() {
6321              if (aclBuilder_ == null) {
6322                return acl_;
6323              } else {
6324                return aclBuilder_.getMessage();
6325              }
6326            }
6327            /**
6328             * <code>optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 5;</code>
6329             */
6330            public Builder setAcl(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto value) {
6331              if (aclBuilder_ == null) {
6332                if (value == null) {
6333                  throw new NullPointerException();
6334                }
6335                acl_ = value;
6336                onChanged();
6337              } else {
6338                aclBuilder_.setMessage(value);
6339              }
6340              bitField0_ |= 0x00000010;
6341              return this;
6342            }
6343            /**
6344             * <code>optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 5;</code>
6345             */
        public Builder setAcl(
            org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto.Builder builderForValue) {
          // Convenience overload: builds the supplied sub-builder and stores the result.
          if (aclBuilder_ == null) {
            acl_ = builderForValue.build();
            onChanged();
          } else {
            aclBuilder_.setMessage(builderForValue.build());
          }
          bitField0_ |= 0x00000010;
          return this;
        }
6357            /**
6358             * <code>optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 5;</code>
6359             */
        public Builder mergeAcl(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto value) {
          if (aclBuilder_ == null) {
            // Standard proto2 merge: if a non-default value is already present,
            // field-merge the incoming value into it; otherwise simply adopt it.
            if (((bitField0_ & 0x00000010) == 0x00000010) &&
                acl_ != org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto.getDefaultInstance()) {
              acl_ =
                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto.newBuilder(acl_).mergeFrom(value).buildPartial();
            } else {
              acl_ = value;
            }
            onChanged();
          } else {
            aclBuilder_.mergeFrom(value);
          }
          bitField0_ |= 0x00000010;
          return this;
        }
6376            /**
6377             * <code>optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 5;</code>
6378             */
        public Builder clearAcl() {
          // Resets the field to its default instance and drops the presence bit.
          if (aclBuilder_ == null) {
            acl_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto.getDefaultInstance();
            onChanged();
          } else {
            aclBuilder_.clear();
          }
          bitField0_ = (bitField0_ & ~0x00000010);
          return this;
        }
6389            /**
6390             * <code>optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 5;</code>
6391             */
        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto.Builder getAclBuilder() {
          // Marks the field present up front: requesting the sub-builder implies intent to set it.
          bitField0_ |= 0x00000010;
          onChanged();
          return getAclFieldBuilder().getBuilder();
        }
6397            /**
6398             * <code>optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 5;</code>
6399             */
        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProtoOrBuilder getAclOrBuilder() {
          // Read-only view: prefers the live sub-builder when one exists.
          if (aclBuilder_ != null) {
            return aclBuilder_.getMessageOrBuilder();
          } else {
            return acl_;
          }
        }
6407            /**
6408             * <code>optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 5;</code>
6409             */
        private com.google.protobuf.SingleFieldBuilder<
            org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProtoOrBuilder> 
            getAclFieldBuilder() {
          // Lazily creates the SingleFieldBuilder, seeding it with the current acl_ value
          // and nulling acl_ so the builder becomes the single source of truth.
          if (aclBuilder_ == null) {
            aclBuilder_ = new com.google.protobuf.SingleFieldBuilder<
                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProtoOrBuilder>(
                    acl_,
                    getParentForChildren(),
                    isClean());
            acl_ = null;
          }
          return aclBuilder_;
        }
6423    
6424            // @@protoc_insertion_point(builder_scope:hadoop.hdfs.fsimage.INodeSection.INodeDirectory)
6425          }
6426    
      static {
        // Build and initialize the shared default INodeDirectory instance at class-load time.
        defaultInstance = new INodeDirectory(true);
        defaultInstance.initFields();
      }
6431    
6432          // @@protoc_insertion_point(class_scope:hadoop.hdfs.fsimage.INodeSection.INodeDirectory)
6433        }
6434    
    /**
     * Read-only accessor contract for {@code hadoop.hdfs.fsimage.INodeSection.INodeSymlink}.
     * Implemented by both the immutable message and its Builder; each optional field
     * pairs a presence check ({@code hasX}) with a getter, per proto2 convention.
     */
    public interface INodeSymlinkOrBuilder
        extends com.google.protobuf.MessageOrBuilder {

      // optional fixed64 permission = 1;
      /**
       * <code>optional fixed64 permission = 1;</code>
       */
      boolean hasPermission();
      /**
       * <code>optional fixed64 permission = 1;</code>
       */
      long getPermission();

      // optional bytes target = 2;
      /**
       * <code>optional bytes target = 2;</code>
       */
      boolean hasTarget();
      /**
       * <code>optional bytes target = 2;</code>
       */
      com.google.protobuf.ByteString getTarget();

      // optional uint64 modificationTime = 3;
      /**
       * <code>optional uint64 modificationTime = 3;</code>
       */
      boolean hasModificationTime();
      /**
       * <code>optional uint64 modificationTime = 3;</code>
       */
      long getModificationTime();

      // optional uint64 accessTime = 4;
      /**
       * <code>optional uint64 accessTime = 4;</code>
       */
      boolean hasAccessTime();
      /**
       * <code>optional uint64 accessTime = 4;</code>
       */
      long getAccessTime();
    }
6478        /**
6479         * Protobuf type {@code hadoop.hdfs.fsimage.INodeSection.INodeSymlink}
6480         */
    public static final class INodeSymlink extends
        com.google.protobuf.GeneratedMessage
        implements INodeSymlinkOrBuilder {
      // NOTE(review): protoc-generated message (see file header "DO NOT EDIT");
      // comments only are added here — code changes belong in fsimage.proto.
      // Use INodeSymlink.newBuilder() to construct.
      private INodeSymlink(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
        super(builder);
        this.unknownFields = builder.getUnknownFields();
      }
      // Used only for the singleton default instance below; skips wire parsing.
      private INodeSymlink(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

      private static final INodeSymlink defaultInstance;
      public static INodeSymlink getDefaultInstance() {
        return defaultInstance;
      }

      public INodeSymlink getDefaultInstanceForType() {
        return defaultInstance;
      }

      private final com.google.protobuf.UnknownFieldSet unknownFields;
      @java.lang.Override
      public final com.google.protobuf.UnknownFieldSet
          getUnknownFields() {
        return this.unknownFields;
      }
      // Wire-format parsing constructor. Tag values are (fieldNumber << 3) | wireType:
      // 9 = field 1/fixed64, 18 = field 2/length-delimited, 24 = field 3/varint,
      // 32 = field 4/varint. The out-of-order default: label is harmless — Java
      // switch dispatch does not depend on case ordering.
      private INodeSymlink(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        initFields();
        int mutable_bitField0_ = 0;
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
            com.google.protobuf.UnknownFieldSet.newBuilder();
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              default: {
                // Unrecognized tag: preserve it in unknownFields (stop on end-group).
                if (!parseUnknownField(input, unknownFields,
                                       extensionRegistry, tag)) {
                  done = true;
                }
                break;
              }
              case 9: {
                bitField0_ |= 0x00000001;
                permission_ = input.readFixed64();
                break;
              }
              case 18: {
                bitField0_ |= 0x00000002;
                target_ = input.readBytes();
                break;
              }
              case 24: {
                bitField0_ |= 0x00000004;
                modificationTime_ = input.readUInt64();
                break;
              }
              case 32: {
                bitField0_ |= 0x00000008;
                accessTime_ = input.readUInt64();
                break;
              }
            }
          }
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(this);
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(
              e.getMessage()).setUnfinishedMessage(this);
        } finally {
          // Runs even on error so the partially-parsed message attached above is consistent.
          this.unknownFields = unknownFields.build();
          makeExtensionsImmutable();
        }
      }
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_INodeSymlink_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_INodeSymlink_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink.Builder.class);
      }

      public static com.google.protobuf.Parser<INodeSymlink> PARSER =
          new com.google.protobuf.AbstractParser<INodeSymlink>() {
        public INodeSymlink parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          return new INodeSymlink(input, extensionRegistry);
        }
      };

      @java.lang.Override
      public com.google.protobuf.Parser<INodeSymlink> getParserForType() {
        return PARSER;
      }

      // Presence bits: 0x1 permission, 0x2 target, 0x4 modificationTime, 0x8 accessTime.
      private int bitField0_;
      // optional fixed64 permission = 1;
      public static final int PERMISSION_FIELD_NUMBER = 1;
      private long permission_;
      /**
       * <code>optional fixed64 permission = 1;</code>
       */
      public boolean hasPermission() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>optional fixed64 permission = 1;</code>
       */
      public long getPermission() {
        return permission_;
      }

      // optional bytes target = 2;
      public static final int TARGET_FIELD_NUMBER = 2;
      private com.google.protobuf.ByteString target_;
      /**
       * <code>optional bytes target = 2;</code>
       */
      public boolean hasTarget() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>optional bytes target = 2;</code>
       */
      public com.google.protobuf.ByteString getTarget() {
        return target_;
      }

      // optional uint64 modificationTime = 3;
      public static final int MODIFICATIONTIME_FIELD_NUMBER = 3;
      private long modificationTime_;
      /**
       * <code>optional uint64 modificationTime = 3;</code>
       */
      public boolean hasModificationTime() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      /**
       * <code>optional uint64 modificationTime = 3;</code>
       */
      public long getModificationTime() {
        return modificationTime_;
      }

      // optional uint64 accessTime = 4;
      public static final int ACCESSTIME_FIELD_NUMBER = 4;
      private long accessTime_;
      /**
       * <code>optional uint64 accessTime = 4;</code>
       */
      public boolean hasAccessTime() {
        return ((bitField0_ & 0x00000008) == 0x00000008);
      }
      /**
       * <code>optional uint64 accessTime = 4;</code>
       */
      public long getAccessTime() {
        return accessTime_;
      }

      // Assigns every field its proto2 default value.
      private void initFields() {
        permission_ = 0L;
        target_ = com.google.protobuf.ByteString.EMPTY;
        modificationTime_ = 0L;
        accessTime_ = 0L;
      }
      private byte memoizedIsInitialized = -1;
      // All four fields are optional, so any instance is initialized; memoized.
      public final boolean isInitialized() {
        byte isInitialized = memoizedIsInitialized;
        if (isInitialized != -1) return isInitialized == 1;

        memoizedIsInitialized = 1;
        return true;
      }

      // Serializes only the fields whose presence bits are set, then unknown fields.
      public void writeTo(com.google.protobuf.CodedOutputStream output)
                          throws java.io.IOException {
        getSerializedSize();
        if (((bitField0_ & 0x00000001) == 0x00000001)) {
          output.writeFixed64(1, permission_);
        }
        if (((bitField0_ & 0x00000002) == 0x00000002)) {
          output.writeBytes(2, target_);
        }
        if (((bitField0_ & 0x00000004) == 0x00000004)) {
          output.writeUInt64(3, modificationTime_);
        }
        if (((bitField0_ & 0x00000008) == 0x00000008)) {
          output.writeUInt64(4, accessTime_);
        }
        getUnknownFields().writeTo(output);
      }

      private int memoizedSerializedSize = -1;
      // Computes (and memoizes) the exact wire size of this message.
      public int getSerializedSize() {
        int size = memoizedSerializedSize;
        if (size != -1) return size;

        size = 0;
        if (((bitField0_ & 0x00000001) == 0x00000001)) {
          size += com.google.protobuf.CodedOutputStream
            .computeFixed64Size(1, permission_);
        }
        if (((bitField0_ & 0x00000002) == 0x00000002)) {
          size += com.google.protobuf.CodedOutputStream
            .computeBytesSize(2, target_);
        }
        if (((bitField0_ & 0x00000004) == 0x00000004)) {
          size += com.google.protobuf.CodedOutputStream
            .computeUInt64Size(3, modificationTime_);
        }
        if (((bitField0_ & 0x00000008) == 0x00000008)) {
          size += com.google.protobuf.CodedOutputStream
            .computeUInt64Size(4, accessTime_);
        }
        size += getUnknownFields().getSerializedSize();
        memoizedSerializedSize = size;
        return size;
      }

      private static final long serialVersionUID = 0L;
      @java.lang.Override
      protected java.lang.Object writeReplace()
          throws java.io.ObjectStreamException {
        return super.writeReplace();
      }

      // Static parse entry points; all delegate to PARSER.
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink parseFrom(
          com.google.protobuf.ByteString data)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink parseFrom(
          com.google.protobuf.ByteString data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink parseFrom(byte[] data)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink parseFrom(
          byte[] data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink parseFrom(java.io.InputStream input)
          throws java.io.IOException {
        return PARSER.parseFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink parseFrom(
          java.io.InputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseFrom(input, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink parseDelimitedFrom(java.io.InputStream input)
          throws java.io.IOException {
        return PARSER.parseDelimitedFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink parseDelimitedFrom(
          java.io.InputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseDelimitedFrom(input, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink parseFrom(
          com.google.protobuf.CodedInputStream input)
          throws java.io.IOException {
        return PARSER.parseFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink parseFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseFrom(input, extensionRegistry);
      }

      public static Builder newBuilder() { return Builder.create(); }
      public Builder newBuilderForType() { return newBuilder(); }
      public static Builder newBuilder(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink prototype) {
        return newBuilder().mergeFrom(prototype);
      }
      public Builder toBuilder() { return newBuilder(this); }

      @java.lang.Override
      protected Builder newBuilderForType(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        Builder builder = new Builder(parent);
        return builder;
      }
      /**
       * Protobuf type {@code hadoop.hdfs.fsimage.INodeSection.INodeSymlink}
       */
      public static final class Builder extends
          com.google.protobuf.GeneratedMessage.Builder<Builder>
         implements org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlinkOrBuilder {
        public static final com.google.protobuf.Descriptors.Descriptor
            getDescriptor() {
          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_INodeSymlink_descriptor;
        }

        protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
            internalGetFieldAccessorTable() {
          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_INodeSymlink_fieldAccessorTable
              .ensureFieldAccessorsInitialized(
                  org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink.Builder.class);
        }

        // Construct using org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink.newBuilder()
        private Builder() {
          maybeForceBuilderInitialization();
        }

        private Builder(
            com.google.protobuf.GeneratedMessage.BuilderParent parent) {
          super(parent);
          maybeForceBuilderInitialization();
        }
        // No message-typed fields here, so nothing to pre-initialize.
        private void maybeForceBuilderInitialization() {
          if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          }
        }
        private static Builder create() {
          return new Builder();
        }

        // Resets every field to its proto default and clears all presence bits.
        public Builder clear() {
          super.clear();
          permission_ = 0L;
          bitField0_ = (bitField0_ & ~0x00000001);
          target_ = com.google.protobuf.ByteString.EMPTY;
          bitField0_ = (bitField0_ & ~0x00000002);
          modificationTime_ = 0L;
          bitField0_ = (bitField0_ & ~0x00000004);
          accessTime_ = 0L;
          bitField0_ = (bitField0_ & ~0x00000008);
          return this;
        }

        public Builder clone() {
          return create().mergeFrom(buildPartial());
        }

        public com.google.protobuf.Descriptors.Descriptor
            getDescriptorForType() {
          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_INodeSymlink_descriptor;
        }

        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink getDefaultInstanceForType() {
          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink.getDefaultInstance();
        }

        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink build() {
          org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink result = buildPartial();
          if (!result.isInitialized()) {
            throw newUninitializedMessageException(result);
          }
          return result;
        }

        // Copies builder state into a new message, condensing presence bits into
        // the message's bitField0_.
        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink buildPartial() {
          org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink result = new org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink(this);
          int from_bitField0_ = bitField0_;
          int to_bitField0_ = 0;
          if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
            to_bitField0_ |= 0x00000001;
          }
          result.permission_ = permission_;
          if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
            to_bitField0_ |= 0x00000002;
          }
          result.target_ = target_;
          if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
            to_bitField0_ |= 0x00000004;
          }
          result.modificationTime_ = modificationTime_;
          if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
            to_bitField0_ |= 0x00000008;
          }
          result.accessTime_ = accessTime_;
          result.bitField0_ = to_bitField0_;
          onBuilt();
          return result;
        }

        public Builder mergeFrom(com.google.protobuf.Message other) {
          if (other instanceof org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink) {
            return mergeFrom((org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink)other);
          } else {
            super.mergeFrom(other);
            return this;
          }
        }

        // Field-by-field merge: fields set in 'other' overwrite this builder's values.
        public Builder mergeFrom(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink other) {
          if (other == org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink.getDefaultInstance()) return this;
          if (other.hasPermission()) {
            setPermission(other.getPermission());
          }
          if (other.hasTarget()) {
            setTarget(other.getTarget());
          }
          if (other.hasModificationTime()) {
            setModificationTime(other.getModificationTime());
          }
          if (other.hasAccessTime()) {
            setAccessTime(other.getAccessTime());
          }
          this.mergeUnknownFields(other.getUnknownFields());
          return this;
        }

        public final boolean isInitialized() {
          return true;
        }

        // Parses from the stream and merges; on parse failure, still merges
        // whatever was successfully read before rethrowing.
        public Builder mergeFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws java.io.IOException {
          org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink parsedMessage = null;
          try {
            parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            parsedMessage = (org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink) e.getUnfinishedMessage();
            throw e;
          } finally {
            if (parsedMessage != null) {
              mergeFrom(parsedMessage);
            }
          }
          return this;
        }
        private int bitField0_;

        // optional fixed64 permission = 1;
        private long permission_ ;
        /**
         * <code>optional fixed64 permission = 1;</code>
         */
        public boolean hasPermission() {
          return ((bitField0_ & 0x00000001) == 0x00000001);
        }
        /**
         * <code>optional fixed64 permission = 1;</code>
         */
        public long getPermission() {
          return permission_;
        }
        /**
         * <code>optional fixed64 permission = 1;</code>
         */
        public Builder setPermission(long value) {
          bitField0_ |= 0x00000001;
          permission_ = value;
          onChanged();
          return this;
        }
        /**
         * <code>optional fixed64 permission = 1;</code>
         */
        public Builder clearPermission() {
          bitField0_ = (bitField0_ & ~0x00000001);
          permission_ = 0L;
          onChanged();
          return this;
        }

        // optional bytes target = 2;
        private com.google.protobuf.ByteString target_ = com.google.protobuf.ByteString.EMPTY;
        /**
         * <code>optional bytes target = 2;</code>
         */
        public boolean hasTarget() {
          return ((bitField0_ & 0x00000002) == 0x00000002);
        }
        /**
         * <code>optional bytes target = 2;</code>
         */
        public com.google.protobuf.ByteString getTarget() {
          return target_;
        }
        /**
         * <code>optional bytes target = 2;</code>
         */
        // Null is rejected; use clearTarget() to unset. (Odd indentation below is
        // verbatim protoc output and is preserved.)
        public Builder setTarget(com.google.protobuf.ByteString value) {
          if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000002;
          target_ = value;
          onChanged();
          return this;
        }
        /**
         * <code>optional bytes target = 2;</code>
         */
        public Builder clearTarget() {
          bitField0_ = (bitField0_ & ~0x00000002);
          target_ = getDefaultInstance().getTarget();
          onChanged();
          return this;
        }

        // optional uint64 modificationTime = 3;
        private long modificationTime_ ;
        /**
         * <code>optional uint64 modificationTime = 3;</code>
         */
        public boolean hasModificationTime() {
          return ((bitField0_ & 0x00000004) == 0x00000004);
        }
        /**
         * <code>optional uint64 modificationTime = 3;</code>
         */
        public long getModificationTime() {
          return modificationTime_;
        }
        /**
         * <code>optional uint64 modificationTime = 3;</code>
         */
        public Builder setModificationTime(long value) {
          bitField0_ |= 0x00000004;
          modificationTime_ = value;
          onChanged();
          return this;
        }
        /**
         * <code>optional uint64 modificationTime = 3;</code>
         */
        public Builder clearModificationTime() {
          bitField0_ = (bitField0_ & ~0x00000004);
          modificationTime_ = 0L;
          onChanged();
          return this;
        }

        // optional uint64 accessTime = 4;
        private long accessTime_ ;
        /**
         * <code>optional uint64 accessTime = 4;</code>
         */
        public boolean hasAccessTime() {
          return ((bitField0_ & 0x00000008) == 0x00000008);
        }
        /**
         * <code>optional uint64 accessTime = 4;</code>
         */
        public long getAccessTime() {
          return accessTime_;
        }
        /**
         * <code>optional uint64 accessTime = 4;</code>
         */
        public Builder setAccessTime(long value) {
          bitField0_ |= 0x00000008;
          accessTime_ = value;
          onChanged();
          return this;
        }
        /**
         * <code>optional uint64 accessTime = 4;</code>
         */
        public Builder clearAccessTime() {
          bitField0_ = (bitField0_ & ~0x00000008);
          accessTime_ = 0L;
          onChanged();
          return this;
        }

        // @@protoc_insertion_point(builder_scope:hadoop.hdfs.fsimage.INodeSection.INodeSymlink)
      }

      static {
        // Build and initialize the shared default instance at class-load time.
        defaultInstance = new INodeSymlink(true);
        defaultInstance.initFields();
      }

      // @@protoc_insertion_point(class_scope:hadoop.hdfs.fsimage.INodeSection.INodeSymlink)
    }
7076    
    /**
     * Read-side accessor contract implemented by both the immutable
     * INodeSection.INode message and its Builder.
     */
    public interface INodeOrBuilder
        extends com.google.protobuf.MessageOrBuilder {

      // required .hadoop.hdfs.fsimage.INodeSection.INode.Type type = 1;
      /**
       * <code>required .hadoop.hdfs.fsimage.INodeSection.INode.Type type = 1;</code>
       *
       * True when the type field is present.
       */
      boolean hasType();
      /**
       * <code>required .hadoop.hdfs.fsimage.INodeSection.INode.Type type = 1;</code>
       *
       * The inode kind (FILE / DIRECTORY / SYMLINK).
       */
      org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode.Type getType();

      // required uint64 id = 2;
      /**
       * <code>required uint64 id = 2;</code>
       */
      boolean hasId();
      /**
       * <code>required uint64 id = 2;</code>
       *
       * uint64 carried in a Java long (high bit may appear as sign).
       */
      long getId();

      // optional bytes name = 3;
      /**
       * <code>optional bytes name = 3;</code>
       */
      boolean hasName();
      /**
       * <code>optional bytes name = 3;</code>
       *
       * Raw name bytes; empty ByteString when unset.
       */
      com.google.protobuf.ByteString getName();

      // optional .hadoop.hdfs.fsimage.INodeSection.INodeFile file = 4;
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeFile file = 4;</code>
       */
      boolean hasFile();
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeFile file = 4;</code>
       */
      org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile getFile();
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeFile file = 4;</code>
       */
      org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFileOrBuilder getFileOrBuilder();

      // optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory directory = 5;
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory directory = 5;</code>
       */
      boolean hasDirectory();
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory directory = 5;</code>
       */
      org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory getDirectory();
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory directory = 5;</code>
       */
      org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectoryOrBuilder getDirectoryOrBuilder();

      // optional .hadoop.hdfs.fsimage.INodeSection.INodeSymlink symlink = 6;
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeSymlink symlink = 6;</code>
       */
      boolean hasSymlink();
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeSymlink symlink = 6;</code>
       */
      org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink getSymlink();
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeSymlink symlink = 6;</code>
       */
      org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlinkOrBuilder getSymlinkOrBuilder();
    }
7152        /**
7153         * Protobuf type {@code hadoop.hdfs.fsimage.INodeSection.INode}
7154         */
7155        public static final class INode extends
7156            com.google.protobuf.GeneratedMessage
7157            implements INodeOrBuilder {
      // Use INode.newBuilder() to construct.
      private INode(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
        super(builder);
        this.unknownFields = builder.getUnknownFields();
      }
      // Constructor for the default instance: no builder, empty unknown-field set.
      private INode(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
7164    
      // Singleton default instance; NOTE(review): presumably assigned in a
      // static initializer later in the class (as done for sibling messages
      // such as INodeSymlink) — that block is outside this view.
      private static final INode defaultInstance;
      public static INode getDefaultInstance() {
        return defaultInstance;
      }

      // Instance-level view of the singleton, required by the Message contract.
      public INode getDefaultInstanceForType() {
        return defaultInstance;
      }

      // Fields seen on the wire that are not declared in the .proto schema.
      private final com.google.protobuf.UnknownFieldSet unknownFields;
      @java.lang.Override
      public final com.google.protobuf.UnknownFieldSet
          getUnknownFields() {
        return this.unknownFields;
      }
      /**
       * Parses an INode message from the given stream.
       *
       * Known tags populate the corresponding fields and presence bits in
       * bitField0_; anything else is preserved in unknownFields so the message
       * round-trips losslessly.  Plain IOExceptions are wrapped in
       * InvalidProtocolBufferException with this partially-built message
       * attached.
       */
      private INode(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        initFields();
        int mutable_bitField0_ = 0;
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
            com.google.protobuf.UnknownFieldSet.newBuilder();
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                // Tag 0 marks end of the stream/message.
                done = true;
                break;
              default: {
                // A 'default' arm before the 'case' labels is legal Java;
                // only tags not matched by the cases below land here.
                if (!parseUnknownField(input, unknownFields,
                                       extensionRegistry, tag)) {
                  done = true;
                }
                break;
              }
              case 8: {
                // type = 1 (enum): unrecognized numbers are kept as varints
                // in unknownFields rather than dropped.
                int rawValue = input.readEnum();
                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode.Type value = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode.Type.valueOf(rawValue);
                if (value == null) {
                  unknownFields.mergeVarintField(1, rawValue);
                } else {
                  bitField0_ |= 0x00000001;
                  type_ = value;
                }
                break;
              }
              case 16: {
                // id = 2 (uint64)
                bitField0_ |= 0x00000002;
                id_ = input.readUInt64();
                break;
              }
              case 26: {
                // name = 3 (bytes)
                bitField0_ |= 0x00000004;
                name_ = input.readBytes();
                break;
              }
              case 34: {
                // file = 4 (message): a repeated occurrence on the wire is
                // merged into the previously-read value.
                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile.Builder subBuilder = null;
                if (((bitField0_ & 0x00000008) == 0x00000008)) {
                  subBuilder = file_.toBuilder();
                }
                file_ = input.readMessage(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile.PARSER, extensionRegistry);
                if (subBuilder != null) {
                  subBuilder.mergeFrom(file_);
                  file_ = subBuilder.buildPartial();
                }
                bitField0_ |= 0x00000008;
                break;
              }
              case 42: {
                // directory = 5 (message), same merge-on-repeat handling.
                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory.Builder subBuilder = null;
                if (((bitField0_ & 0x00000010) == 0x00000010)) {
                  subBuilder = directory_.toBuilder();
                }
                directory_ = input.readMessage(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory.PARSER, extensionRegistry);
                if (subBuilder != null) {
                  subBuilder.mergeFrom(directory_);
                  directory_ = subBuilder.buildPartial();
                }
                bitField0_ |= 0x00000010;
                break;
              }
              case 50: {
                // symlink = 6 (message), same merge-on-repeat handling.
                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink.Builder subBuilder = null;
                if (((bitField0_ & 0x00000020) == 0x00000020)) {
                  subBuilder = symlink_.toBuilder();
                }
                symlink_ = input.readMessage(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink.PARSER, extensionRegistry);
                if (subBuilder != null) {
                  subBuilder.mergeFrom(symlink_);
                  symlink_ = subBuilder.buildPartial();
                }
                bitField0_ |= 0x00000020;
                break;
              }
            }
          }
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(this);
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(
              e.getMessage()).setUnfinishedMessage(this);
        } finally {
          // Always freeze whatever was read — even on failure — so the
          // partial message attached to the exception is usable.
          this.unknownFields = unknownFields.build();
          makeExtensionsImmutable();
        }
      }
      /** Returns the reflection descriptor for this message type. */
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_INode_descriptor;
      }

      // Binds the generated field accessors to this class and its Builder.
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_INode_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode.Builder.class);
      }
7286    
      // Stateless parser that delegates to the stream-reading constructor.
      // NOTE(review): protoc 2.5 emits this as a mutable public static field;
      // treat it as effectively final.
      public static com.google.protobuf.Parser<INode> PARSER =
          new com.google.protobuf.AbstractParser<INode>() {
        public INode parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          return new INode(input, extensionRegistry);
        }
      };

      @java.lang.Override
      public com.google.protobuf.Parser<INode> getParserForType() {
        return PARSER;
      }
7301    
7302          /**
7303           * Protobuf enum {@code hadoop.hdfs.fsimage.INodeSection.INode.Type}
7304           */
7305          public enum Type
7306              implements com.google.protobuf.ProtocolMessageEnum {
7307            /**
7308             * <code>FILE = 1;</code>
7309             */
7310            FILE(0, 1),
7311            /**
7312             * <code>DIRECTORY = 2;</code>
7313             */
7314            DIRECTORY(1, 2),
7315            /**
7316             * <code>SYMLINK = 3;</code>
7317             */
7318            SYMLINK(2, 3),
7319            ;
7320    
7321            /**
7322             * <code>FILE = 1;</code>
7323             */
7324            public static final int FILE_VALUE = 1;
7325            /**
7326             * <code>DIRECTORY = 2;</code>
7327             */
7328            public static final int DIRECTORY_VALUE = 2;
7329            /**
7330             * <code>SYMLINK = 3;</code>
7331             */
7332            public static final int SYMLINK_VALUE = 3;
7333    
7334    
7335            public final int getNumber() { return value; }
7336    
7337            public static Type valueOf(int value) {
7338              switch (value) {
7339                case 1: return FILE;
7340                case 2: return DIRECTORY;
7341                case 3: return SYMLINK;
7342                default: return null;
7343              }
7344            }
7345    
7346            public static com.google.protobuf.Internal.EnumLiteMap<Type>
7347                internalGetValueMap() {
7348              return internalValueMap;
7349            }
7350            private static com.google.protobuf.Internal.EnumLiteMap<Type>
7351                internalValueMap =
7352                  new com.google.protobuf.Internal.EnumLiteMap<Type>() {
7353                    public Type findValueByNumber(int number) {
7354                      return Type.valueOf(number);
7355                    }
7356                  };
7357    
7358            public final com.google.protobuf.Descriptors.EnumValueDescriptor
7359                getValueDescriptor() {
7360              return getDescriptor().getValues().get(index);
7361            }
7362            public final com.google.protobuf.Descriptors.EnumDescriptor
7363                getDescriptorForType() {
7364              return getDescriptor();
7365            }
7366            public static final com.google.protobuf.Descriptors.EnumDescriptor
7367                getDescriptor() {
7368              return org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode.getDescriptor().getEnumTypes().get(0);
7369            }
7370    
7371            private static final Type[] VALUES = values();
7372    
7373            public static Type valueOf(
7374                com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
7375              if (desc.getType() != getDescriptor()) {
7376                throw new java.lang.IllegalArgumentException(
7377                  "EnumValueDescriptor is not for this type.");
7378              }
7379              return VALUES[desc.getIndex()];
7380            }
7381    
7382            private final int index;
7383            private final int value;
7384    
7385            private Type(int index, int value) {
7386              this.index = index;
7387              this.value = value;
7388            }
7389    
7390            // @@protoc_insertion_point(enum_scope:hadoop.hdfs.fsimage.INodeSection.INode.Type)
7391          }
7392    
      // Presence bitmask: bit (1 << (n-1)) is set when field number n was
      // present on the wire (fields 1..6).
      private int bitField0_;
      // required .hadoop.hdfs.fsimage.INodeSection.INode.Type type = 1;
      public static final int TYPE_FIELD_NUMBER = 1;
      private org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode.Type type_;
      /**
       * <code>required .hadoop.hdfs.fsimage.INodeSection.INode.Type type = 1;</code>
       *
       * True when the type field was present on the wire.
       */
      public boolean hasType() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>required .hadoop.hdfs.fsimage.INodeSection.INode.Type type = 1;</code>
       *
       * Defaults to Type.FILE when unset (see initFields()).
       */
      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode.Type getType() {
        return type_;
      }

      // required uint64 id = 2;
      public static final int ID_FIELD_NUMBER = 2;
      private long id_;
      /**
       * <code>required uint64 id = 2;</code>
       */
      public boolean hasId() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>required uint64 id = 2;</code>
       *
       * uint64 carried in a Java long; values above 2^63-1 appear negative.
       */
      public long getId() {
        return id_;
      }

      // optional bytes name = 3;
      public static final int NAME_FIELD_NUMBER = 3;
      private com.google.protobuf.ByteString name_;
      /**
       * <code>optional bytes name = 3;</code>
       */
      public boolean hasName() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      /**
       * <code>optional bytes name = 3;</code>
       *
       * Raw name bytes; ByteString.EMPTY when unset.
       */
      public com.google.protobuf.ByteString getName() {
        return name_;
      }
7441    
      // optional .hadoop.hdfs.fsimage.INodeSection.INodeFile file = 4;
      public static final int FILE_FIELD_NUMBER = 4;
      private org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile file_;
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeFile file = 4;</code>
       */
      public boolean hasFile() {
        return ((bitField0_ & 0x00000008) == 0x00000008);
      }
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeFile file = 4;</code>
       *
       * Returns the file sub-message; the INodeFile default instance when
       * unset (see initFields()).
       */
      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile getFile() {
        return file_;
      }
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeFile file = 4;</code>
       *
       * On the immutable message this is the same object getFile() returns.
       */
      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFileOrBuilder getFileOrBuilder() {
        return file_;
      }

      // optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory directory = 5;
      public static final int DIRECTORY_FIELD_NUMBER = 5;
      private org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory directory_;
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory directory = 5;</code>
       */
      public boolean hasDirectory() {
        return ((bitField0_ & 0x00000010) == 0x00000010);
      }
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory directory = 5;</code>
       *
       * Returns the directory sub-message; its default instance when unset.
       */
      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory getDirectory() {
        return directory_;
      }
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory directory = 5;</code>
       */
      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectoryOrBuilder getDirectoryOrBuilder() {
        return directory_;
      }

      // optional .hadoop.hdfs.fsimage.INodeSection.INodeSymlink symlink = 6;
      public static final int SYMLINK_FIELD_NUMBER = 6;
      private org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink symlink_;
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeSymlink symlink = 6;</code>
       */
      public boolean hasSymlink() {
        return ((bitField0_ & 0x00000020) == 0x00000020);
      }
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeSymlink symlink = 6;</code>
       *
       * Returns the symlink sub-message; its default instance when unset.
       */
      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink getSymlink() {
        return symlink_;
      }
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeSymlink symlink = 6;</code>
       */
      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlinkOrBuilder getSymlinkOrBuilder() {
        return symlink_;
      }
7507    
      // Installs the proto default for every field; called before parsing.
      private void initFields() {
        type_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode.Type.FILE;
        id_ = 0L;
        name_ = com.google.protobuf.ByteString.EMPTY;
        file_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile.getDefaultInstance();
        directory_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory.getDefaultInstance();
        symlink_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink.getDefaultInstance();
      }
      // -1 = not yet computed, 0 = known-uninitialized, 1 = known-initialized.
      private byte memoizedIsInitialized = -1;
      /**
       * Checks that the required fields (type, id) are present and that a
       * present file sub-message is itself initialized; the result is memoized.
       *
       * NOTE(review): directory and symlink are not checked recursively —
       * presumably those message types declare no required fields, so protoc
       * omits the check.  Confirm against fsimage.proto before relying on it.
       */
      public final boolean isInitialized() {
        byte isInitialized = memoizedIsInitialized;
        if (isInitialized != -1) return isInitialized == 1;

        if (!hasType()) {
          memoizedIsInitialized = 0;
          return false;
        }
        if (!hasId()) {
          memoizedIsInitialized = 0;
          return false;
        }
        if (hasFile()) {
          if (!getFile().isInitialized()) {
            memoizedIsInitialized = 0;
            return false;
          }
        }
        memoizedIsInitialized = 1;
        return true;
      }
7538    
7539          public void writeTo(com.google.protobuf.CodedOutputStream output)
7540                              throws java.io.IOException {
7541            getSerializedSize();
7542            if (((bitField0_ & 0x00000001) == 0x00000001)) {
7543              output.writeEnum(1, type_.getNumber());
7544            }
7545            if (((bitField0_ & 0x00000002) == 0x00000002)) {
7546              output.writeUInt64(2, id_);
7547            }
7548            if (((bitField0_ & 0x00000004) == 0x00000004)) {
7549              output.writeBytes(3, name_);
7550            }
7551            if (((bitField0_ & 0x00000008) == 0x00000008)) {
7552              output.writeMessage(4, file_);
7553            }
7554            if (((bitField0_ & 0x00000010) == 0x00000010)) {
7555              output.writeMessage(5, directory_);
7556            }
7557            if (((bitField0_ & 0x00000020) == 0x00000020)) {
7558              output.writeMessage(6, symlink_);
7559            }
7560            getUnknownFields().writeTo(output);
7561          }
7562    
7563          private int memoizedSerializedSize = -1;
7564          public int getSerializedSize() {
7565            int size = memoizedSerializedSize;
7566            if (size != -1) return size;
7567    
7568            size = 0;
7569            if (((bitField0_ & 0x00000001) == 0x00000001)) {
7570              size += com.google.protobuf.CodedOutputStream
7571                .computeEnumSize(1, type_.getNumber());
7572            }
7573            if (((bitField0_ & 0x00000002) == 0x00000002)) {
7574              size += com.google.protobuf.CodedOutputStream
7575                .computeUInt64Size(2, id_);
7576            }
7577            if (((bitField0_ & 0x00000004) == 0x00000004)) {
7578              size += com.google.protobuf.CodedOutputStream
7579                .computeBytesSize(3, name_);
7580            }
7581            if (((bitField0_ & 0x00000008) == 0x00000008)) {
7582              size += com.google.protobuf.CodedOutputStream
7583                .computeMessageSize(4, file_);
7584            }
7585            if (((bitField0_ & 0x00000010) == 0x00000010)) {
7586              size += com.google.protobuf.CodedOutputStream
7587                .computeMessageSize(5, directory_);
7588            }
7589            if (((bitField0_ & 0x00000020) == 0x00000020)) {
7590              size += com.google.protobuf.CodedOutputStream
7591                .computeMessageSize(6, symlink_);
7592            }
7593            size += getUnknownFields().getSerializedSize();
7594            memoizedSerializedSize = size;
7595            return size;
7596          }
7597    
      private static final long serialVersionUID = 0L;
      // Java serialization is routed through GeneratedMessage's writeReplace
      // (serialized proxy pattern).
      @java.lang.Override
      protected java.lang.Object writeReplace()
          throws java.io.ObjectStreamException {
        return super.writeReplace();
      }
7604    
      // ---------------------------------------------------------------------
      // Static parse entry points.  Every overload delegates to PARSER; the
      // ExtensionRegistryLite variants resolve extensions during parsing, and
      // the *Delimited* variants read a varint length prefix first.
      // ---------------------------------------------------------------------
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode parseFrom(
          com.google.protobuf.ByteString data)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode parseFrom(
          com.google.protobuf.ByteString data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode parseFrom(byte[] data)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode parseFrom(
          byte[] data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode parseFrom(java.io.InputStream input)
          throws java.io.IOException {
        return PARSER.parseFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode parseFrom(
          java.io.InputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseFrom(input, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode parseDelimitedFrom(java.io.InputStream input)
          throws java.io.IOException {
        return PARSER.parseDelimitedFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode parseDelimitedFrom(
          java.io.InputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseDelimitedFrom(input, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode parseFrom(
          com.google.protobuf.CodedInputStream input)
          throws java.io.IOException {
        return PARSER.parseFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode parseFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseFrom(input, extensionRegistry);
      }
7657    
      /** Creates a new, empty builder. */
      public static Builder newBuilder() { return Builder.create(); }
      public Builder newBuilderForType() { return newBuilder(); }
      /** Creates a builder pre-populated from {@code prototype}. */
      public static Builder newBuilder(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode prototype) {
        return newBuilder().mergeFrom(prototype);
      }
      /** Creates a builder pre-populated from this message. */
      public Builder toBuilder() { return newBuilder(this); }

      // Runtime hook: creates a builder parented to an enclosing builder so
      // change notifications propagate.
      @java.lang.Override
      protected Builder newBuilderForType(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        Builder builder = new Builder(parent);
        return builder;
      }
7671          /**
7672           * Protobuf type {@code hadoop.hdfs.fsimage.INodeSection.INode}
7673           */
7674          public static final class Builder extends
7675              com.google.protobuf.GeneratedMessage.Builder<Builder>
7676             implements org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeOrBuilder {
        /** Same reflection descriptor as the INode message class. */
        public static final com.google.protobuf.Descriptors.Descriptor
            getDescriptor() {
          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_INode_descriptor;
        }

        // Binds generated field accessors to the message and builder classes.
        protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
            internalGetFieldAccessorTable() {
          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_INode_fieldAccessorTable
              .ensureFieldAccessorsInitialized(
                  org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode.Builder.class);
        }
7688    
        // Construct using org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode.newBuilder()
        private Builder() {
          maybeForceBuilderInitialization();
        }

        // Parented variant: changes propagate to the enclosing builder.
        private Builder(
            com.google.protobuf.GeneratedMessage.BuilderParent parent) {
          super(parent);
          maybeForceBuilderInitialization();
        }
        // Eagerly creates the nested-message field builders when the protobuf
        // runtime's alwaysUseFieldBuilders flag is enabled.
        private void maybeForceBuilderInitialization() {
          if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
            getFileFieldBuilder();
            getDirectoryFieldBuilder();
            getSymlinkFieldBuilder();
          }
        }
        private static Builder create() {
          return new Builder();
        }
7709    
        /**
         * Resets every field to its default value and clears all presence
         * bits in {@code bitField0_}. Sub-message fields are reset either on
         * the local field or on the delegate builder, whichever is active.
         */
        public Builder clear() {
          super.clear();
          type_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode.Type.FILE;
          bitField0_ = (bitField0_ & ~0x00000001);
          id_ = 0L;
          bitField0_ = (bitField0_ & ~0x00000002);
          name_ = com.google.protobuf.ByteString.EMPTY;
          bitField0_ = (bitField0_ & ~0x00000004);
          if (fileBuilder_ == null) {
            file_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile.getDefaultInstance();
          } else {
            fileBuilder_.clear();
          }
          bitField0_ = (bitField0_ & ~0x00000008);
          if (directoryBuilder_ == null) {
            directory_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory.getDefaultInstance();
          } else {
            directoryBuilder_.clear();
          }
          bitField0_ = (bitField0_ & ~0x00000010);
          if (symlinkBuilder_ == null) {
            symlink_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink.getDefaultInstance();
          } else {
            symlinkBuilder_.clear();
          }
          bitField0_ = (bitField0_ & ~0x00000020);
          return this;
        }

        /**
         * Deep-copies the builder by snapshotting current state via
         * buildPartial() and merging it into a fresh builder.
         */
        public Builder clone() {
          return create().mergeFrom(buildPartial());
        }
7742    
        // Descriptor of the message type this builder produces.
        public com.google.protobuf.Descriptors.Descriptor
            getDescriptorForType() {
          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_INode_descriptor;
        }

        // Singleton default instance of INode (all fields unset/default).
        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode getDefaultInstanceForType() {
          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode.getDefaultInstance();
        }

        /**
         * Builds the message, throwing UninitializedMessageException if any
         * required field (type, id) is missing; use buildPartial() to skip
         * that check.
         */
        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode build() {
          org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode result = buildPartial();
          if (!result.isInitialized()) {
            throw newUninitializedMessageException(result);
          }
          return result;
        }
7759    
        /**
         * Builds the message without checking required fields. Copies each
         * field value into the result and translates the builder's presence
         * bits ({@code from_bitField0_}) into the message's presence bits
         * ({@code to_bitField0_}) one bit at a time. Sub-messages come from
         * the local field or the delegate builder, whichever is active.
         */
        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode buildPartial() {
          org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode result = new org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode(this);
          int from_bitField0_ = bitField0_;
          int to_bitField0_ = 0;
          if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
            to_bitField0_ |= 0x00000001;
          }
          result.type_ = type_;
          if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
            to_bitField0_ |= 0x00000002;
          }
          result.id_ = id_;
          if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
            to_bitField0_ |= 0x00000004;
          }
          result.name_ = name_;
          if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
            to_bitField0_ |= 0x00000008;
          }
          if (fileBuilder_ == null) {
            result.file_ = file_;
          } else {
            result.file_ = fileBuilder_.build();
          }
          if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
            to_bitField0_ |= 0x00000010;
          }
          if (directoryBuilder_ == null) {
            result.directory_ = directory_;
          } else {
            result.directory_ = directoryBuilder_.build();
          }
          if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
            to_bitField0_ |= 0x00000020;
          }
          if (symlinkBuilder_ == null) {
            result.symlink_ = symlink_;
          } else {
            result.symlink_ = symlinkBuilder_.build();
          }
          result.bitField0_ = to_bitField0_;
          onBuilt();
          return result;
        }
7804    
        /**
         * Type-dispatching merge: routes to the typed overload when
         * {@code other} is an INode, otherwise falls back to the generic
         * reflection-based merge in the superclass.
         */
        public Builder mergeFrom(com.google.protobuf.Message other) {
          if (other instanceof org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode) {
            return mergeFrom((org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode)other);
          } else {
            super.mergeFrom(other);
            return this;
          }
        }

        /**
         * Field-by-field merge from another INode: fields set on
         * {@code other} overwrite scalars (type, id, name) and recursively
         * merge sub-messages (file, directory, symlink); unknown fields are
         * merged last. Merging the default instance is a no-op.
         */
        public Builder mergeFrom(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode other) {
          if (other == org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode.getDefaultInstance()) return this;
          if (other.hasType()) {
            setType(other.getType());
          }
          if (other.hasId()) {
            setId(other.getId());
          }
          if (other.hasName()) {
            setName(other.getName());
          }
          if (other.hasFile()) {
            mergeFile(other.getFile());
          }
          if (other.hasDirectory()) {
            mergeDirectory(other.getDirectory());
          }
          if (other.hasSymlink()) {
            mergeSymlink(other.getSymlink());
          }
          this.mergeUnknownFields(other.getUnknownFields());
          return this;
        }
7837    
        /**
         * Required-field check for the builder: {@code type} and {@code id}
         * are required; the optional {@code file} sub-message, when present,
         * must itself be initialized (it has required fields of its own).
         */
        public final boolean isInitialized() {
          if (!hasType()) {
            
            return false;
          }
          if (!hasId()) {
            
            return false;
          }
          if (hasFile()) {
            if (!getFile().isInitialized()) {
              
              return false;
            }
          }
          return true;
        }
7855    
        /**
         * Parses an INode from the stream and merges it into this builder.
         * On InvalidProtocolBufferException the partially parsed message is
         * recovered from the exception and still merged (in the finally
         * block) before the exception is rethrown, so no parsed data is lost.
         */
        public Builder mergeFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws java.io.IOException {
          org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode parsedMessage = null;
          try {
            parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            parsedMessage = (org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode) e.getUnfinishedMessage();
            throw e;
          } finally {
            if (parsedMessage != null) {
              mergeFrom(parsedMessage);
            }
          }
          return this;
        }
7873            private int bitField0_;
7874    
7875            // required .hadoop.hdfs.fsimage.INodeSection.INode.Type type = 1;
7876            private org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode.Type type_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode.Type.FILE;
7877            /**
7878             * <code>required .hadoop.hdfs.fsimage.INodeSection.INode.Type type = 1;</code>
7879             */
7880            public boolean hasType() {
7881              return ((bitField0_ & 0x00000001) == 0x00000001);
7882            }
7883            /**
7884             * <code>required .hadoop.hdfs.fsimage.INodeSection.INode.Type type = 1;</code>
7885             */
7886            public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode.Type getType() {
7887              return type_;
7888            }
7889            /**
7890             * <code>required .hadoop.hdfs.fsimage.INodeSection.INode.Type type = 1;</code>
7891             */
7892            public Builder setType(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode.Type value) {
7893              if (value == null) {
7894                throw new NullPointerException();
7895              }
7896              bitField0_ |= 0x00000001;
7897              type_ = value;
7898              onChanged();
7899              return this;
7900            }
7901            /**
7902             * <code>required .hadoop.hdfs.fsimage.INodeSection.INode.Type type = 1;</code>
7903             */
7904            public Builder clearType() {
7905              bitField0_ = (bitField0_ & ~0x00000001);
7906              type_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode.Type.FILE;
7907              onChanged();
7908              return this;
7909            }
7910    
7911            // required uint64 id = 2;
7912            private long id_ ;
7913            /**
7914             * <code>required uint64 id = 2;</code>
7915             */
7916            public boolean hasId() {
7917              return ((bitField0_ & 0x00000002) == 0x00000002);
7918            }
7919            /**
7920             * <code>required uint64 id = 2;</code>
7921             */
7922            public long getId() {
7923              return id_;
7924            }
7925            /**
7926             * <code>required uint64 id = 2;</code>
7927             */
7928            public Builder setId(long value) {
7929              bitField0_ |= 0x00000002;
7930              id_ = value;
7931              onChanged();
7932              return this;
7933            }
7934            /**
7935             * <code>required uint64 id = 2;</code>
7936             */
7937            public Builder clearId() {
7938              bitField0_ = (bitField0_ & ~0x00000002);
7939              id_ = 0L;
7940              onChanged();
7941              return this;
7942            }
7943    
7944            // optional bytes name = 3;
7945            private com.google.protobuf.ByteString name_ = com.google.protobuf.ByteString.EMPTY;
7946            /**
7947             * <code>optional bytes name = 3;</code>
7948             */
7949            public boolean hasName() {
7950              return ((bitField0_ & 0x00000004) == 0x00000004);
7951            }
7952            /**
7953             * <code>optional bytes name = 3;</code>
7954             */
7955            public com.google.protobuf.ByteString getName() {
7956              return name_;
7957            }
7958            /**
7959             * <code>optional bytes name = 3;</code>
7960             */
7961            public Builder setName(com.google.protobuf.ByteString value) {
7962              if (value == null) {
7963        throw new NullPointerException();
7964      }
7965      bitField0_ |= 0x00000004;
7966              name_ = value;
7967              onChanged();
7968              return this;
7969            }
7970            /**
7971             * <code>optional bytes name = 3;</code>
7972             */
7973            public Builder clearName() {
7974              bitField0_ = (bitField0_ & ~0x00000004);
7975              name_ = getDefaultInstance().getName();
7976              onChanged();
7977              return this;
7978            }
7979    
        // optional .hadoop.hdfs.fsimage.INodeSection.INodeFile file = 4;
        // Sub-message field: before getFileFieldBuilder() is first called the
        // value lives in file_; afterwards fileBuilder_ owns the state and
        // file_ is nulled. Every accessor below branches on which is active.
        private org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile file_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile.getDefaultInstance();
        private com.google.protobuf.SingleFieldBuilder<
            org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFileOrBuilder> fileBuilder_;
        /**
         * Whether {@code file} has been explicitly set (bit 0x00000008).
         *
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeFile file = 4;</code>
         */
        public boolean hasFile() {
          return ((bitField0_ & 0x00000008) == 0x00000008);
        }
        /**
         * Returns the current {@code file} value from whichever store is active.
         *
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeFile file = 4;</code>
         */
        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile getFile() {
          if (fileBuilder_ == null) {
            return file_;
          } else {
            return fileBuilder_.getMessage();
          }
        }
        /**
         * Replaces {@code file} with the given message and marks it present.
         *
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeFile file = 4;</code>
         */
        public Builder setFile(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile value) {
          if (fileBuilder_ == null) {
            if (value == null) {
              throw new NullPointerException();
            }
            file_ = value;
            onChanged();
          } else {
            fileBuilder_.setMessage(value);
          }
          bitField0_ |= 0x00000008;
          return this;
        }
        /**
         * Replaces {@code file} with the built value of the given builder.
         *
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeFile file = 4;</code>
         */
        public Builder setFile(
            org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile.Builder builderForValue) {
          if (fileBuilder_ == null) {
            file_ = builderForValue.build();
            onChanged();
          } else {
            fileBuilder_.setMessage(builderForValue.build());
          }
          bitField0_ |= 0x00000008;
          return this;
        }
        /**
         * Merges {@code value} into the existing {@code file}: if a non-default
         * value is already present the two are field-merged, otherwise
         * {@code value} simply replaces it.
         *
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeFile file = 4;</code>
         */
        public Builder mergeFile(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile value) {
          if (fileBuilder_ == null) {
            if (((bitField0_ & 0x00000008) == 0x00000008) &&
                file_ != org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile.getDefaultInstance()) {
              file_ =
                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile.newBuilder(file_).mergeFrom(value).buildPartial();
            } else {
              file_ = value;
            }
            onChanged();
          } else {
            fileBuilder_.mergeFrom(value);
          }
          bitField0_ |= 0x00000008;
          return this;
        }
        /**
         * Resets {@code file} to its default and clears its presence bit.
         *
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeFile file = 4;</code>
         */
        public Builder clearFile() {
          if (fileBuilder_ == null) {
            file_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile.getDefaultInstance();
            onChanged();
          } else {
            fileBuilder_.clear();
          }
          bitField0_ = (bitField0_ & ~0x00000008);
          return this;
        }
        /**
         * Returns a mutable builder for {@code file}; marks the field present
         * (callers are expected to populate it).
         *
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeFile file = 4;</code>
         */
        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile.Builder getFileBuilder() {
          bitField0_ |= 0x00000008;
          onChanged();
          return getFileFieldBuilder().getBuilder();
        }
        /**
         * Read-only view of {@code file} without forcing builder creation.
         *
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeFile file = 4;</code>
         */
        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFileOrBuilder getFileOrBuilder() {
          if (fileBuilder_ != null) {
            return fileBuilder_.getMessageOrBuilder();
          } else {
            return file_;
          }
        }
        /**
         * Lazily creates the SingleFieldBuilder for {@code file}, seeding it
         * with the current value and nulling {@code file_} to hand over
         * ownership of the field's state.
         *
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeFile file = 4;</code>
         */
        private com.google.protobuf.SingleFieldBuilder<
            org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFileOrBuilder> 
            getFileFieldBuilder() {
          if (fileBuilder_ == null) {
            fileBuilder_ = new com.google.protobuf.SingleFieldBuilder<
                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFileOrBuilder>(
                    file_,
                    getParentForChildren(),
                    isClean());
            file_ = null;
          }
          return fileBuilder_;
        }
8096    
        // optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory directory = 5;
        // Sub-message field: state lives in directory_ until
        // getDirectoryFieldBuilder() is first called, after which
        // directoryBuilder_ owns it and directory_ is nulled.
        private org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory directory_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory.getDefaultInstance();
        private com.google.protobuf.SingleFieldBuilder<
            org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectoryOrBuilder> directoryBuilder_;
        /**
         * Whether {@code directory} has been explicitly set (bit 0x00000010).
         *
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory directory = 5;</code>
         */
        public boolean hasDirectory() {
          return ((bitField0_ & 0x00000010) == 0x00000010);
        }
        /**
         * Returns the current {@code directory} value from the active store.
         *
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory directory = 5;</code>
         */
        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory getDirectory() {
          if (directoryBuilder_ == null) {
            return directory_;
          } else {
            return directoryBuilder_.getMessage();
          }
        }
        /**
         * Replaces {@code directory} with the given message and marks it present.
         *
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory directory = 5;</code>
         */
        public Builder setDirectory(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory value) {
          if (directoryBuilder_ == null) {
            if (value == null) {
              throw new NullPointerException();
            }
            directory_ = value;
            onChanged();
          } else {
            directoryBuilder_.setMessage(value);
          }
          bitField0_ |= 0x00000010;
          return this;
        }
        /**
         * Replaces {@code directory} with the built value of the given builder.
         *
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory directory = 5;</code>
         */
        public Builder setDirectory(
            org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory.Builder builderForValue) {
          if (directoryBuilder_ == null) {
            directory_ = builderForValue.build();
            onChanged();
          } else {
            directoryBuilder_.setMessage(builderForValue.build());
          }
          bitField0_ |= 0x00000010;
          return this;
        }
        /**
         * Merges {@code value} into the existing {@code directory}: if a
         * non-default value is already present the two are field-merged,
         * otherwise {@code value} simply replaces it.
         *
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory directory = 5;</code>
         */
        public Builder mergeDirectory(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory value) {
          if (directoryBuilder_ == null) {
            if (((bitField0_ & 0x00000010) == 0x00000010) &&
                directory_ != org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory.getDefaultInstance()) {
              directory_ =
                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory.newBuilder(directory_).mergeFrom(value).buildPartial();
            } else {
              directory_ = value;
            }
            onChanged();
          } else {
            directoryBuilder_.mergeFrom(value);
          }
          bitField0_ |= 0x00000010;
          return this;
        }
        /**
         * Resets {@code directory} to its default and clears its presence bit.
         *
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory directory = 5;</code>
         */
        public Builder clearDirectory() {
          if (directoryBuilder_ == null) {
            directory_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory.getDefaultInstance();
            onChanged();
          } else {
            directoryBuilder_.clear();
          }
          bitField0_ = (bitField0_ & ~0x00000010);
          return this;
        }
        /**
         * Returns a mutable builder for {@code directory}; marks the field
         * present (callers are expected to populate it).
         *
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory directory = 5;</code>
         */
        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory.Builder getDirectoryBuilder() {
          bitField0_ |= 0x00000010;
          onChanged();
          return getDirectoryFieldBuilder().getBuilder();
        }
        /**
         * Read-only view of {@code directory} without forcing builder creation.
         *
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory directory = 5;</code>
         */
        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectoryOrBuilder getDirectoryOrBuilder() {
          if (directoryBuilder_ != null) {
            return directoryBuilder_.getMessageOrBuilder();
          } else {
            return directory_;
          }
        }
        /**
         * Lazily creates the SingleFieldBuilder for {@code directory}, seeding
         * it with the current value and nulling {@code directory_} to hand
         * over ownership of the field's state.
         *
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory directory = 5;</code>
         */
        private com.google.protobuf.SingleFieldBuilder<
            org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectoryOrBuilder> 
            getDirectoryFieldBuilder() {
          if (directoryBuilder_ == null) {
            directoryBuilder_ = new com.google.protobuf.SingleFieldBuilder<
                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectoryOrBuilder>(
                    directory_,
                    getParentForChildren(),
                    isClean());
            directory_ = null;
          }
          return directoryBuilder_;
        }
8213    
        // optional .hadoop.hdfs.fsimage.INodeSection.INodeSymlink symlink = 6;
        // Sub-message field: state lives in symlink_ until
        // getSymlinkFieldBuilder() is first called, after which
        // symlinkBuilder_ owns it and symlink_ is nulled.
        private org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink symlink_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink.getDefaultInstance();
        private com.google.protobuf.SingleFieldBuilder<
            org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlinkOrBuilder> symlinkBuilder_;
        /**
         * Whether {@code symlink} has been explicitly set (bit 0x00000020).
         *
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeSymlink symlink = 6;</code>
         */
        public boolean hasSymlink() {
          return ((bitField0_ & 0x00000020) == 0x00000020);
        }
        /**
         * Returns the current {@code symlink} value from the active store.
         *
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeSymlink symlink = 6;</code>
         */
        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink getSymlink() {
          if (symlinkBuilder_ == null) {
            return symlink_;
          } else {
            return symlinkBuilder_.getMessage();
          }
        }
        /**
         * Replaces {@code symlink} with the given message and marks it present.
         *
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeSymlink symlink = 6;</code>
         */
        public Builder setSymlink(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink value) {
          if (symlinkBuilder_ == null) {
            if (value == null) {
              throw new NullPointerException();
            }
            symlink_ = value;
            onChanged();
          } else {
            symlinkBuilder_.setMessage(value);
          }
          bitField0_ |= 0x00000020;
          return this;
        }
        /**
         * Replaces {@code symlink} with the built value of the given builder.
         *
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeSymlink symlink = 6;</code>
         */
        public Builder setSymlink(
            org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink.Builder builderForValue) {
          if (symlinkBuilder_ == null) {
            symlink_ = builderForValue.build();
            onChanged();
          } else {
            symlinkBuilder_.setMessage(builderForValue.build());
          }
          bitField0_ |= 0x00000020;
          return this;
        }
        /**
         * Merges {@code value} into the existing {@code symlink}: if a
         * non-default value is already present the two are field-merged,
         * otherwise {@code value} simply replaces it.
         *
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeSymlink symlink = 6;</code>
         */
        public Builder mergeSymlink(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink value) {
          if (symlinkBuilder_ == null) {
            if (((bitField0_ & 0x00000020) == 0x00000020) &&
                symlink_ != org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink.getDefaultInstance()) {
              symlink_ =
                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink.newBuilder(symlink_).mergeFrom(value).buildPartial();
            } else {
              symlink_ = value;
            }
            onChanged();
          } else {
            symlinkBuilder_.mergeFrom(value);
          }
          bitField0_ |= 0x00000020;
          return this;
        }
        /**
         * Resets {@code symlink} to its default and clears its presence bit.
         *
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeSymlink symlink = 6;</code>
         */
        public Builder clearSymlink() {
          if (symlinkBuilder_ == null) {
            symlink_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink.getDefaultInstance();
            onChanged();
          } else {
            symlinkBuilder_.clear();
          }
          bitField0_ = (bitField0_ & ~0x00000020);
          return this;
        }
        /**
         * Returns a mutable builder for {@code symlink}; marks the field
         * present (callers are expected to populate it).
         *
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeSymlink symlink = 6;</code>
         */
        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink.Builder getSymlinkBuilder() {
          bitField0_ |= 0x00000020;
          onChanged();
          return getSymlinkFieldBuilder().getBuilder();
        }
        /**
         * Read-only view of {@code symlink} without forcing builder creation.
         *
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeSymlink symlink = 6;</code>
         */
        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlinkOrBuilder getSymlinkOrBuilder() {
          if (symlinkBuilder_ != null) {
            return symlinkBuilder_.getMessageOrBuilder();
          } else {
            return symlink_;
          }
        }
        /**
         * Lazily creates the SingleFieldBuilder for {@code symlink}, seeding
         * it with the current value and nulling {@code symlink_} to hand over
         * ownership of the field's state.
         *
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeSymlink symlink = 6;</code>
         */
        private com.google.protobuf.SingleFieldBuilder<
            org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlinkOrBuilder> 
            getSymlinkFieldBuilder() {
          if (symlinkBuilder_ == null) {
            symlinkBuilder_ = new com.google.protobuf.SingleFieldBuilder<
                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlinkOrBuilder>(
                    symlink_,
                    getParentForChildren(),
                    isClean());
            symlink_ = null;
          }
          return symlinkBuilder_;
        }
8330    
8331            // @@protoc_insertion_point(builder_scope:hadoop.hdfs.fsimage.INodeSection.INode)
8332          }
8333    
      // Creates the INode default-instance singleton eagerly at class load
      // and initializes all its fields to their protobuf defaults.
      static {
        defaultInstance = new INode(true);
        defaultInstance.initFields();
      }
8338    
8339          // @@protoc_insertion_point(class_scope:hadoop.hdfs.fsimage.INodeSection.INode)
8340        }
8341    
8342        private int bitField0_;
8343        // optional uint64 lastInodeId = 1;
8344        public static final int LASTINODEID_FIELD_NUMBER = 1;
8345        private long lastInodeId_;
8346        /**
8347         * <code>optional uint64 lastInodeId = 1;</code>
8348         */
8349        public boolean hasLastInodeId() {
8350          return ((bitField0_ & 0x00000001) == 0x00000001);
8351        }
8352        /**
8353         * <code>optional uint64 lastInodeId = 1;</code>
8354         */
8355        public long getLastInodeId() {
8356          return lastInodeId_;
8357        }
8358    
8359        // optional uint64 numInodes = 2;
8360        public static final int NUMINODES_FIELD_NUMBER = 2;
8361        private long numInodes_;
8362        /**
8363         * <code>optional uint64 numInodes = 2;</code>
8364         *
8365         * <pre>
8366         * repeated INodes..
8367         * </pre>
8368         */
8369        public boolean hasNumInodes() {
8370          return ((bitField0_ & 0x00000002) == 0x00000002);
8371        }
8372        /**
8373         * <code>optional uint64 numInodes = 2;</code>
8374         *
8375         * <pre>
8376         * repeated INodes..
8377         * </pre>
8378         */
8379        public long getNumInodes() {
8380          return numInodes_;
8381        }
8382    
8383        private void initFields() {
8384          lastInodeId_ = 0L;
8385          numInodes_ = 0L;
8386        }
8387        private byte memoizedIsInitialized = -1;
8388        public final boolean isInitialized() {
8389          byte isInitialized = memoizedIsInitialized;
8390          if (isInitialized != -1) return isInitialized == 1;
8391    
8392          memoizedIsInitialized = 1;
8393          return true;
8394        }
8395    
8396        public void writeTo(com.google.protobuf.CodedOutputStream output)
8397                            throws java.io.IOException {
8398          getSerializedSize();
8399          if (((bitField0_ & 0x00000001) == 0x00000001)) {
8400            output.writeUInt64(1, lastInodeId_);
8401          }
8402          if (((bitField0_ & 0x00000002) == 0x00000002)) {
8403            output.writeUInt64(2, numInodes_);
8404          }
8405          getUnknownFields().writeTo(output);
8406        }
8407    
8408        private int memoizedSerializedSize = -1;
8409        public int getSerializedSize() {
8410          int size = memoizedSerializedSize;
8411          if (size != -1) return size;
8412    
8413          size = 0;
8414          if (((bitField0_ & 0x00000001) == 0x00000001)) {
8415            size += com.google.protobuf.CodedOutputStream
8416              .computeUInt64Size(1, lastInodeId_);
8417          }
8418          if (((bitField0_ & 0x00000002) == 0x00000002)) {
8419            size += com.google.protobuf.CodedOutputStream
8420              .computeUInt64Size(2, numInodes_);
8421          }
8422          size += getUnknownFields().getSerializedSize();
8423          memoizedSerializedSize = size;
8424          return size;
8425        }
8426    
    private static final long serialVersionUID = 0L;
    // Java serialization is routed through the protobuf-aware replacement
    // object supplied by GeneratedMessage.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
8433    
    // ---- Static parsing entry points: every overload delegates to PARSER. ----
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    // Delimited variants read a varint length prefix before the message body.
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
8486    
    // Builder factory methods: fresh builder, builder seeded from a prototype,
    // and builder seeded from this instance.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    // Used by the runtime to create child builders wired to a parent.
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
8500        /**
8501         * Protobuf type {@code hadoop.hdfs.fsimage.INodeSection}
8502         *
8503         * <pre>
8504         **
8505         * Permission is serialized as a 64-bit long. [0:24):[25:48):[48:64) (in Big Endian).
8506         * The first and the second parts are the string ids of the user and
8507         * group name, and the last 16 bits are the permission bits.
8508         *
8509         * Name: INODE
8510         * </pre>
8511         */
8512        public static final class Builder extends
8513            com.google.protobuf.GeneratedMessage.Builder<Builder>
8514           implements org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSectionOrBuilder {
8515          public static final com.google.protobuf.Descriptors.Descriptor
8516              getDescriptor() {
8517            return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_descriptor;
8518          }
8519    
8520          protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
8521              internalGetFieldAccessorTable() {
8522            return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_fieldAccessorTable
8523                .ensureFieldAccessorsInitialized(
8524                    org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.Builder.class);
8525          }
8526    
8527          // Construct using org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.newBuilder()
8528          private Builder() {
8529            maybeForceBuilderInitialization();
8530          }
8531    
8532          private Builder(
8533              com.google.protobuf.GeneratedMessage.BuilderParent parent) {
8534            super(parent);
8535            maybeForceBuilderInitialization();
8536          }
8537          private void maybeForceBuilderInitialization() {
8538            if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
8539            }
8540          }
8541          private static Builder create() {
8542            return new Builder();
8543          }
8544    
8545          public Builder clear() {
8546            super.clear();
8547            lastInodeId_ = 0L;
8548            bitField0_ = (bitField0_ & ~0x00000001);
8549            numInodes_ = 0L;
8550            bitField0_ = (bitField0_ & ~0x00000002);
8551            return this;
8552          }
8553    
8554          public Builder clone() {
8555            return create().mergeFrom(buildPartial());
8556          }
8557    
8558          public com.google.protobuf.Descriptors.Descriptor
8559              getDescriptorForType() {
8560            return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_descriptor;
8561          }
8562    
8563          public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection getDefaultInstanceForType() {
8564            return org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.getDefaultInstance();
8565          }
8566    
8567          public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection build() {
8568            org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection result = buildPartial();
8569            if (!result.isInitialized()) {
8570              throw newUninitializedMessageException(result);
8571            }
8572            return result;
8573          }
8574    
8575          public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection buildPartial() {
8576            org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection result = new org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection(this);
8577            int from_bitField0_ = bitField0_;
8578            int to_bitField0_ = 0;
8579            if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
8580              to_bitField0_ |= 0x00000001;
8581            }
8582            result.lastInodeId_ = lastInodeId_;
8583            if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
8584              to_bitField0_ |= 0x00000002;
8585            }
8586            result.numInodes_ = numInodes_;
8587            result.bitField0_ = to_bitField0_;
8588            onBuilt();
8589            return result;
8590          }
8591    
8592          public Builder mergeFrom(com.google.protobuf.Message other) {
8593            if (other instanceof org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection) {
8594              return mergeFrom((org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection)other);
8595            } else {
8596              super.mergeFrom(other);
8597              return this;
8598            }
8599          }
8600    
8601          public Builder mergeFrom(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection other) {
8602            if (other == org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.getDefaultInstance()) return this;
8603            if (other.hasLastInodeId()) {
8604              setLastInodeId(other.getLastInodeId());
8605            }
8606            if (other.hasNumInodes()) {
8607              setNumInodes(other.getNumInodes());
8608            }
8609            this.mergeUnknownFields(other.getUnknownFields());
8610            return this;
8611          }
8612    
8613          public final boolean isInitialized() {
8614            return true;
8615          }
8616    
8617          public Builder mergeFrom(
8618              com.google.protobuf.CodedInputStream input,
8619              com.google.protobuf.ExtensionRegistryLite extensionRegistry)
8620              throws java.io.IOException {
8621            org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection parsedMessage = null;
8622            try {
8623              parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
8624            } catch (com.google.protobuf.InvalidProtocolBufferException e) {
8625              parsedMessage = (org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection) e.getUnfinishedMessage();
8626              throw e;
8627            } finally {
8628              if (parsedMessage != null) {
8629                mergeFrom(parsedMessage);
8630              }
8631            }
8632            return this;
8633          }
8634          private int bitField0_;
8635    
8636          // optional uint64 lastInodeId = 1;
8637          private long lastInodeId_ ;
8638          /**
8639           * <code>optional uint64 lastInodeId = 1;</code>
8640           */
8641          public boolean hasLastInodeId() {
8642            return ((bitField0_ & 0x00000001) == 0x00000001);
8643          }
8644          /**
8645           * <code>optional uint64 lastInodeId = 1;</code>
8646           */
8647          public long getLastInodeId() {
8648            return lastInodeId_;
8649          }
8650          /**
8651           * <code>optional uint64 lastInodeId = 1;</code>
8652           */
8653          public Builder setLastInodeId(long value) {
8654            bitField0_ |= 0x00000001;
8655            lastInodeId_ = value;
8656            onChanged();
8657            return this;
8658          }
8659          /**
8660           * <code>optional uint64 lastInodeId = 1;</code>
8661           */
8662          public Builder clearLastInodeId() {
8663            bitField0_ = (bitField0_ & ~0x00000001);
8664            lastInodeId_ = 0L;
8665            onChanged();
8666            return this;
8667          }
8668    
8669          // optional uint64 numInodes = 2;
8670          private long numInodes_ ;
8671          /**
8672           * <code>optional uint64 numInodes = 2;</code>
8673           *
8674           * <pre>
8675           * repeated INodes..
8676           * </pre>
8677           */
8678          public boolean hasNumInodes() {
8679            return ((bitField0_ & 0x00000002) == 0x00000002);
8680          }
8681          /**
8682           * <code>optional uint64 numInodes = 2;</code>
8683           *
8684           * <pre>
8685           * repeated INodes..
8686           * </pre>
8687           */
8688          public long getNumInodes() {
8689            return numInodes_;
8690          }
8691          /**
8692           * <code>optional uint64 numInodes = 2;</code>
8693           *
8694           * <pre>
8695           * repeated INodes..
8696           * </pre>
8697           */
8698          public Builder setNumInodes(long value) {
8699            bitField0_ |= 0x00000002;
8700            numInodes_ = value;
8701            onChanged();
8702            return this;
8703          }
8704          /**
8705           * <code>optional uint64 numInodes = 2;</code>
8706           *
8707           * <pre>
8708           * repeated INodes..
8709           * </pre>
8710           */
8711          public Builder clearNumInodes() {
8712            bitField0_ = (bitField0_ & ~0x00000002);
8713            numInodes_ = 0L;
8714            onChanged();
8715            return this;
8716          }
8717    
8718          // @@protoc_insertion_point(builder_scope:hadoop.hdfs.fsimage.INodeSection)
8719        }
8720    
    static {
      // Eagerly build the shared singleton returned by getDefaultInstance().
      defaultInstance = new INodeSection(true);
      defaultInstance.initFields();
    }
8725    
8726        // @@protoc_insertion_point(class_scope:hadoop.hdfs.fsimage.INodeSection)
8727      }
8728    
  // The section message itself carries no fields; per-file entries are
  // serialized separately as FileUnderConstructionEntry messages.
  public interface FilesUnderConstructionSectionOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
  }
8732      /**
8733       * Protobuf type {@code hadoop.hdfs.fsimage.FilesUnderConstructionSection}
8734       *
8735       * <pre>
8736       **
8737       * This section records information about under-construction files for
8738       * reconstructing the lease map.
8739       * NAME: FILES_UNDERCONSTRUCTION
8740       * </pre>
8741       */
8742      public static final class FilesUnderConstructionSection extends
8743          com.google.protobuf.GeneratedMessage
8744          implements FilesUnderConstructionSectionOrBuilder {
    // Use FilesUnderConstructionSection.newBuilder() to construct.
    private FilesUnderConstructionSection(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Constructor for the shared default instance; skips normal initialization.
    private FilesUnderConstructionSection(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
8751    
    // Shared immutable default instance, created in the static initializer.
    private static final FilesUnderConstructionSection defaultInstance;
    public static FilesUnderConstructionSection getDefaultInstance() {
      return defaultInstance;
    }

    public FilesUnderConstructionSection getDefaultInstanceForType() {
      return defaultInstance;
    }
8760    
    // Fields read from the wire that this generated class does not recognize.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format constructor: reads the stream until end-of-message (tag 0).
    // Since this message declares no fields, everything encountered is kept
    // as unknown fields.
    private FilesUnderConstructionSection(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Runs even on failure so the partial message keeps what was read.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_FilesUnderConstructionSection_descriptor;
    }

    // Maps descriptor fields to the reflective accessors on this class/Builder.
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_FilesUnderConstructionSection_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.Builder.class);
    }
8812    
    // Stateless parser singleton; delegates to the stream-parsing constructor.
    public static com.google.protobuf.Parser<FilesUnderConstructionSection> PARSER =
        new com.google.protobuf.AbstractParser<FilesUnderConstructionSection>() {
      public FilesUnderConstructionSection parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new FilesUnderConstructionSection(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<FilesUnderConstructionSection> getParserForType() {
      return PARSER;
    }
8827    
    // Read-only view of one under-construction file entry: its inode id and
    // its full path at the time the image was written.
    public interface FileUnderConstructionEntryOrBuilder
        extends com.google.protobuf.MessageOrBuilder {

      // optional uint64 inodeId = 1;
      /**
       * <code>optional uint64 inodeId = 1;</code>
       */
      boolean hasInodeId();
      /**
       * <code>optional uint64 inodeId = 1;</code>
       */
      long getInodeId();

      // optional string fullPath = 2;
      /**
       * <code>optional string fullPath = 2;</code>
       */
      boolean hasFullPath();
      /**
       * <code>optional string fullPath = 2;</code>
       */
      java.lang.String getFullPath();
      /**
       * <code>optional string fullPath = 2;</code>
       */
      com.google.protobuf.ByteString
          getFullPathBytes();
    }
8856        /**
8857         * Protobuf type {@code hadoop.hdfs.fsimage.FilesUnderConstructionSection.FileUnderConstructionEntry}
8858         */
8859        public static final class FileUnderConstructionEntry extends
8860            com.google.protobuf.GeneratedMessage
8861            implements FileUnderConstructionEntryOrBuilder {
      // Use FileUnderConstructionEntry.newBuilder() to construct.
      private FileUnderConstructionEntry(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
        super(builder);
        this.unknownFields = builder.getUnknownFields();
      }
      // Constructor for the shared default instance; skips normal initialization.
      private FileUnderConstructionEntry(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
8868    
      // Shared immutable default instance, created in the static initializer.
      private static final FileUnderConstructionEntry defaultInstance;
      public static FileUnderConstructionEntry getDefaultInstance() {
        return defaultInstance;
      }

      public FileUnderConstructionEntry getDefaultInstanceForType() {
        return defaultInstance;
      }
8877    
      // Fields read from the wire that this generated class does not recognize.
      private final com.google.protobuf.UnknownFieldSet unknownFields;
      @java.lang.Override
      public final com.google.protobuf.UnknownFieldSet
          getUnknownFields() {
        return this.unknownFields;
      }
      // Wire-format constructor: decodes inodeId (tag 8 = field 1, varint)
      // and fullPath (tag 18 = field 2, length-delimited); anything else is
      // preserved as unknown fields.
      private FileUnderConstructionEntry(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        initFields();
        // Generated but unused here (no repeated fields need a mutable bit).
        int mutable_bitField0_ = 0;
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
            com.google.protobuf.UnknownFieldSet.newBuilder();
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            // NOTE: `default:` precedes the numbered cases; Java switch
            // semantics make this equivalent to the usual ordering.
            switch (tag) {
              case 0:
                done = true;
                break;
              default: {
                if (!parseUnknownField(input, unknownFields,
                                       extensionRegistry, tag)) {
                  done = true;
                }
                break;
              }
              case 8: {
                bitField0_ |= 0x00000001;
                inodeId_ = input.readUInt64();
                break;
              }
              case 18: {
                // Stored as raw bytes; decoded to String lazily in getFullPath().
                bitField0_ |= 0x00000002;
                fullPath_ = input.readBytes();
                break;
              }
            }
          }
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(this);
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(
              e.getMessage()).setUnfinishedMessage(this);
        } finally {
          // Runs even on failure so the partial message keeps what was read.
          this.unknownFields = unknownFields.build();
          makeExtensionsImmutable();
        }
      }
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_FilesUnderConstructionSection_FileUnderConstructionEntry_descriptor;
      }

      // Maps descriptor fields to the reflective accessors on this class/Builder.
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_FilesUnderConstructionSection_FileUnderConstructionEntry_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.FileUnderConstructionEntry.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.FileUnderConstructionEntry.Builder.class);
      }
8940    
      // Stateless parser singleton; delegates to the stream-parsing constructor.
      public static com.google.protobuf.Parser<FileUnderConstructionEntry> PARSER =
          new com.google.protobuf.AbstractParser<FileUnderConstructionEntry>() {
        public FileUnderConstructionEntry parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          return new FileUnderConstructionEntry(input, extensionRegistry);
        }
      };

      @java.lang.Override
      public com.google.protobuf.Parser<FileUnderConstructionEntry> getParserForType() {
        return PARSER;
      }
8955    
8956          private int bitField0_;
8957          // optional uint64 inodeId = 1;
8958          public static final int INODEID_FIELD_NUMBER = 1;
8959          private long inodeId_;
8960          /**
8961           * <code>optional uint64 inodeId = 1;</code>
8962           */
8963          public boolean hasInodeId() {
8964            return ((bitField0_ & 0x00000001) == 0x00000001);
8965          }
8966          /**
8967           * <code>optional uint64 inodeId = 1;</code>
8968           */
8969          public long getInodeId() {
8970            return inodeId_;
8971          }
8972    
8973          // optional string fullPath = 2;
8974          public static final int FULLPATH_FIELD_NUMBER = 2;
8975          private java.lang.Object fullPath_;
8976          /**
8977           * <code>optional string fullPath = 2;</code>
8978           */
8979          public boolean hasFullPath() {
8980            return ((bitField0_ & 0x00000002) == 0x00000002);
8981          }
8982          /**
8983           * <code>optional string fullPath = 2;</code>
8984           */
8985          public java.lang.String getFullPath() {
8986            java.lang.Object ref = fullPath_;
8987            if (ref instanceof java.lang.String) {
8988              return (java.lang.String) ref;
8989            } else {
8990              com.google.protobuf.ByteString bs = 
8991                  (com.google.protobuf.ByteString) ref;
8992              java.lang.String s = bs.toStringUtf8();
8993              if (bs.isValidUtf8()) {
8994                fullPath_ = s;
8995              }
8996              return s;
8997            }
8998          }
8999          /**
9000           * <code>optional string fullPath = 2;</code>
9001           */
9002          public com.google.protobuf.ByteString
9003              getFullPathBytes() {
9004            java.lang.Object ref = fullPath_;
9005            if (ref instanceof java.lang.String) {
9006              com.google.protobuf.ByteString b = 
9007                  com.google.protobuf.ByteString.copyFromUtf8(
9008                      (java.lang.String) ref);
9009              fullPath_ = b;
9010              return b;
9011            } else {
9012              return (com.google.protobuf.ByteString) ref;
9013            }
9014          }
9015    
9016          private void initFields() {
9017            inodeId_ = 0L;
9018            fullPath_ = "";
9019          }
9020          private byte memoizedIsInitialized = -1;
9021          public final boolean isInitialized() {
9022            byte isInitialized = memoizedIsInitialized;
9023            if (isInitialized != -1) return isInitialized == 1;
9024    
9025            memoizedIsInitialized = 1;
9026            return true;
9027          }
9028    
9029          public void writeTo(com.google.protobuf.CodedOutputStream output)
9030                              throws java.io.IOException {
9031            getSerializedSize();
9032            if (((bitField0_ & 0x00000001) == 0x00000001)) {
9033              output.writeUInt64(1, inodeId_);
9034            }
9035            if (((bitField0_ & 0x00000002) == 0x00000002)) {
9036              output.writeBytes(2, getFullPathBytes());
9037            }
9038            getUnknownFields().writeTo(output);
9039          }
9040    
9041          private int memoizedSerializedSize = -1;
9042          public int getSerializedSize() {
9043            int size = memoizedSerializedSize;
9044            if (size != -1) return size;
9045    
9046            size = 0;
9047            if (((bitField0_ & 0x00000001) == 0x00000001)) {
9048              size += com.google.protobuf.CodedOutputStream
9049                .computeUInt64Size(1, inodeId_);
9050            }
9051            if (((bitField0_ & 0x00000002) == 0x00000002)) {
9052              size += com.google.protobuf.CodedOutputStream
9053                .computeBytesSize(2, getFullPathBytes());
9054            }
9055            size += getUnknownFields().getSerializedSize();
9056            memoizedSerializedSize = size;
9057            return size;
9058          }
9059    
      private static final long serialVersionUID = 0L;
      // Java serialization is routed through the protobuf-aware replacement
      // object supplied by GeneratedMessage.
      @java.lang.Override
      protected java.lang.Object writeReplace()
          throws java.io.ObjectStreamException {
        return super.writeReplace();
      }
9066    
      // Static parse entry points; every overload delegates to PARSER.
      // The parseDelimitedFrom variants handle length-prefixed messages
      // (see the protobuf Parser documentation).
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.FileUnderConstructionEntry parseFrom(
          com.google.protobuf.ByteString data)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.FileUnderConstructionEntry parseFrom(
          com.google.protobuf.ByteString data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.FileUnderConstructionEntry parseFrom(byte[] data)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.FileUnderConstructionEntry parseFrom(
          byte[] data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.FileUnderConstructionEntry parseFrom(java.io.InputStream input)
          throws java.io.IOException {
        return PARSER.parseFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.FileUnderConstructionEntry parseFrom(
          java.io.InputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseFrom(input, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.FileUnderConstructionEntry parseDelimitedFrom(java.io.InputStream input)
          throws java.io.IOException {
        return PARSER.parseDelimitedFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.FileUnderConstructionEntry parseDelimitedFrom(
          java.io.InputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseDelimitedFrom(input, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.FileUnderConstructionEntry parseFrom(
          com.google.protobuf.CodedInputStream input)
          throws java.io.IOException {
        return PARSER.parseFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.FileUnderConstructionEntry parseFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseFrom(input, extensionRegistry);
      }
9119    
      // Builder entry points: a fresh builder, and builders pre-populated
      // from a prototype message or from this instance.
      public static Builder newBuilder() { return Builder.create(); }
      public Builder newBuilderForType() { return newBuilder(); }
      public static Builder newBuilder(org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.FileUnderConstructionEntry prototype) {
        return newBuilder().mergeFrom(prototype);
      }
      public Builder toBuilder() { return newBuilder(this); }
9126    
      @java.lang.Override
      protected Builder newBuilderForType(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        // Parent-aware builder used by GeneratedMessage's nested-builder
        // machinery.
        Builder builder = new Builder(parent);
        return builder;
      }
9133          /**
9134           * Protobuf type {@code hadoop.hdfs.fsimage.FilesUnderConstructionSection.FileUnderConstructionEntry}
9135           */
9136          public static final class Builder extends
9137              com.google.protobuf.GeneratedMessage.Builder<Builder>
9138             implements org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.FileUnderConstructionEntryOrBuilder {
9139            public static final com.google.protobuf.Descriptors.Descriptor
9140                getDescriptor() {
9141              return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_FilesUnderConstructionSection_FileUnderConstructionEntry_descriptor;
9142            }
9143    
9144            protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
9145                internalGetFieldAccessorTable() {
9146              return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_FilesUnderConstructionSection_FileUnderConstructionEntry_fieldAccessorTable
9147                  .ensureFieldAccessorsInitialized(
9148                      org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.FileUnderConstructionEntry.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.FileUnderConstructionEntry.Builder.class);
9149            }
9150    
9151            // Construct using org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.FileUnderConstructionEntry.newBuilder()
9152            private Builder() {
9153              maybeForceBuilderInitialization();
9154            }
9155    
9156            private Builder(
9157                com.google.protobuf.GeneratedMessage.BuilderParent parent) {
9158              super(parent);
9159              maybeForceBuilderInitialization();
9160            }
9161            private void maybeForceBuilderInitialization() {
9162              if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
9163              }
9164            }
9165            private static Builder create() {
9166              return new Builder();
9167            }
9168    
9169            public Builder clear() {
9170              super.clear();
9171              inodeId_ = 0L;
9172              bitField0_ = (bitField0_ & ~0x00000001);
9173              fullPath_ = "";
9174              bitField0_ = (bitField0_ & ~0x00000002);
9175              return this;
9176            }
9177    
9178            public Builder clone() {
9179              return create().mergeFrom(buildPartial());
9180            }
9181    
9182            public com.google.protobuf.Descriptors.Descriptor
9183                getDescriptorForType() {
9184              return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_FilesUnderConstructionSection_FileUnderConstructionEntry_descriptor;
9185            }
9186    
9187            public org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.FileUnderConstructionEntry getDefaultInstanceForType() {
9188              return org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.FileUnderConstructionEntry.getDefaultInstance();
9189            }
9190    
9191            public org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.FileUnderConstructionEntry build() {
9192              org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.FileUnderConstructionEntry result = buildPartial();
9193              if (!result.isInitialized()) {
9194                throw newUninitializedMessageException(result);
9195              }
9196              return result;
9197            }
9198    
9199            public org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.FileUnderConstructionEntry buildPartial() {
9200              org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.FileUnderConstructionEntry result = new org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.FileUnderConstructionEntry(this);
9201              int from_bitField0_ = bitField0_;
9202              int to_bitField0_ = 0;
9203              if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
9204                to_bitField0_ |= 0x00000001;
9205              }
9206              result.inodeId_ = inodeId_;
9207              if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
9208                to_bitField0_ |= 0x00000002;
9209              }
9210              result.fullPath_ = fullPath_;
9211              result.bitField0_ = to_bitField0_;
9212              onBuilt();
9213              return result;
9214            }
9215    
9216            public Builder mergeFrom(com.google.protobuf.Message other) {
9217              if (other instanceof org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.FileUnderConstructionEntry) {
9218                return mergeFrom((org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.FileUnderConstructionEntry)other);
9219              } else {
9220                super.mergeFrom(other);
9221                return this;
9222              }
9223            }
9224    
9225            public Builder mergeFrom(org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.FileUnderConstructionEntry other) {
9226              if (other == org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.FileUnderConstructionEntry.getDefaultInstance()) return this;
9227              if (other.hasInodeId()) {
9228                setInodeId(other.getInodeId());
9229              }
9230              if (other.hasFullPath()) {
9231                bitField0_ |= 0x00000002;
9232                fullPath_ = other.fullPath_;
9233                onChanged();
9234              }
9235              this.mergeUnknownFields(other.getUnknownFields());
9236              return this;
9237            }
9238    
9239            public final boolean isInitialized() {
9240              return true;
9241            }
9242    
9243            public Builder mergeFrom(
9244                com.google.protobuf.CodedInputStream input,
9245                com.google.protobuf.ExtensionRegistryLite extensionRegistry)
9246                throws java.io.IOException {
9247              org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.FileUnderConstructionEntry parsedMessage = null;
9248              try {
9249                parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
9250              } catch (com.google.protobuf.InvalidProtocolBufferException e) {
9251                parsedMessage = (org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.FileUnderConstructionEntry) e.getUnfinishedMessage();
9252                throw e;
9253              } finally {
9254                if (parsedMessage != null) {
9255                  mergeFrom(parsedMessage);
9256                }
9257              }
9258              return this;
9259            }
9260            private int bitField0_;
9261    
9262            // optional uint64 inodeId = 1;
9263            private long inodeId_ ;
9264            /**
9265             * <code>optional uint64 inodeId = 1;</code>
9266             */
9267            public boolean hasInodeId() {
9268              return ((bitField0_ & 0x00000001) == 0x00000001);
9269            }
9270            /**
9271             * <code>optional uint64 inodeId = 1;</code>
9272             */
9273            public long getInodeId() {
9274              return inodeId_;
9275            }
9276            /**
9277             * <code>optional uint64 inodeId = 1;</code>
9278             */
9279            public Builder setInodeId(long value) {
9280              bitField0_ |= 0x00000001;
9281              inodeId_ = value;
9282              onChanged();
9283              return this;
9284            }
9285            /**
9286             * <code>optional uint64 inodeId = 1;</code>
9287             */
9288            public Builder clearInodeId() {
9289              bitField0_ = (bitField0_ & ~0x00000001);
9290              inodeId_ = 0L;
9291              onChanged();
9292              return this;
9293            }
9294    
9295            // optional string fullPath = 2;
9296            private java.lang.Object fullPath_ = "";
9297            /**
9298             * <code>optional string fullPath = 2;</code>
9299             */
9300            public boolean hasFullPath() {
9301              return ((bitField0_ & 0x00000002) == 0x00000002);
9302            }
9303            /**
9304             * <code>optional string fullPath = 2;</code>
9305             */
9306            public java.lang.String getFullPath() {
9307              java.lang.Object ref = fullPath_;
9308              if (!(ref instanceof java.lang.String)) {
9309                java.lang.String s = ((com.google.protobuf.ByteString) ref)
9310                    .toStringUtf8();
9311                fullPath_ = s;
9312                return s;
9313              } else {
9314                return (java.lang.String) ref;
9315              }
9316            }
9317            /**
9318             * <code>optional string fullPath = 2;</code>
9319             */
9320            public com.google.protobuf.ByteString
9321                getFullPathBytes() {
9322              java.lang.Object ref = fullPath_;
9323              if (ref instanceof String) {
9324                com.google.protobuf.ByteString b = 
9325                    com.google.protobuf.ByteString.copyFromUtf8(
9326                        (java.lang.String) ref);
9327                fullPath_ = b;
9328                return b;
9329              } else {
9330                return (com.google.protobuf.ByteString) ref;
9331              }
9332            }
9333            /**
9334             * <code>optional string fullPath = 2;</code>
9335             */
9336            public Builder setFullPath(
9337                java.lang.String value) {
9338              if (value == null) {
9339        throw new NullPointerException();
9340      }
9341      bitField0_ |= 0x00000002;
9342              fullPath_ = value;
9343              onChanged();
9344              return this;
9345            }
9346            /**
9347             * <code>optional string fullPath = 2;</code>
9348             */
9349            public Builder clearFullPath() {
9350              bitField0_ = (bitField0_ & ~0x00000002);
9351              fullPath_ = getDefaultInstance().getFullPath();
9352              onChanged();
9353              return this;
9354            }
9355            /**
9356             * <code>optional string fullPath = 2;</code>
9357             */
9358            public Builder setFullPathBytes(
9359                com.google.protobuf.ByteString value) {
9360              if (value == null) {
9361        throw new NullPointerException();
9362      }
9363      bitField0_ |= 0x00000002;
9364              fullPath_ = value;
9365              onChanged();
9366              return this;
9367            }
9368    
9369            // @@protoc_insertion_point(builder_scope:hadoop.hdfs.fsimage.FilesUnderConstructionSection.FileUnderConstructionEntry)
9370          }
9371    
      // Create and initialize the shared default instance.
      static {
        defaultInstance = new FileUnderConstructionEntry(true);
        defaultInstance.initFields();
      }
9376    
9377          // @@protoc_insertion_point(class_scope:hadoop.hdfs.fsimage.FilesUnderConstructionSection.FileUnderConstructionEntry)
9378        }
9379    
9380        private void initFields() {
9381        }
9382        private byte memoizedIsInitialized = -1;
9383        public final boolean isInitialized() {
9384          byte isInitialized = memoizedIsInitialized;
9385          if (isInitialized != -1) return isInitialized == 1;
9386    
9387          memoizedIsInitialized = 1;
9388          return true;
9389        }
9390    
    /**
     * Serializes this section to the given stream. The message declares no
     * fields of its own, so only retained unknown fields are written.
     */
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      getUnknownFields().writeTo(output);
    }
9396    
9397        private int memoizedSerializedSize = -1;
9398        public int getSerializedSize() {
9399          int size = memoizedSerializedSize;
9400          if (size != -1) return size;
9401    
9402          size = 0;
9403          size += getUnknownFields().getSerializedSize();
9404          memoizedSerializedSize = size;
9405          return size;
9406        }
9407    
    private static final long serialVersionUID = 0L;
    // Java serialization delegates to GeneratedMessage's serialization
    // proxy via super.writeReplace() rather than serializing fields
    // directly.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
9414    
    // Static parse entry points; every overload delegates to PARSER.
    // The parseDelimitedFrom variants handle length-prefixed messages
    // (see the protobuf Parser documentation).
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
9467    
    // Builder entry points: a fresh builder, and builders pre-populated
    // from a prototype message or from this instance.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
9474    
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      // Parent-aware builder used by GeneratedMessage's nested-builder
      // machinery.
      Builder builder = new Builder(parent);
      return builder;
    }
9481        /**
9482         * Protobuf type {@code hadoop.hdfs.fsimage.FilesUnderConstructionSection}
9483         *
9484         * <pre>
9485         **
9486         * This section records information about under-construction files for
9487         * reconstructing the lease map.
9488         * NAME: FILES_UNDERCONSTRUCTION
9489         * </pre>
9490         */
9491        public static final class Builder extends
9492            com.google.protobuf.GeneratedMessage.Builder<Builder>
9493           implements org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSectionOrBuilder {
9494          public static final com.google.protobuf.Descriptors.Descriptor
9495              getDescriptor() {
9496            return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_FilesUnderConstructionSection_descriptor;
9497          }
9498    
9499          protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
9500              internalGetFieldAccessorTable() {
9501            return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_FilesUnderConstructionSection_fieldAccessorTable
9502                .ensureFieldAccessorsInitialized(
9503                    org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.Builder.class);
9504          }
9505    
9506          // Construct using org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.newBuilder()
9507          private Builder() {
9508            maybeForceBuilderInitialization();
9509          }
9510    
9511          private Builder(
9512              com.google.protobuf.GeneratedMessage.BuilderParent parent) {
9513            super(parent);
9514            maybeForceBuilderInitialization();
9515          }
9516          private void maybeForceBuilderInitialization() {
9517            if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
9518            }
9519          }
9520          private static Builder create() {
9521            return new Builder();
9522          }
9523    
9524          public Builder clear() {
9525            super.clear();
9526            return this;
9527          }
9528    
9529          public Builder clone() {
9530            return create().mergeFrom(buildPartial());
9531          }
9532    
9533          public com.google.protobuf.Descriptors.Descriptor
9534              getDescriptorForType() {
9535            return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_FilesUnderConstructionSection_descriptor;
9536          }
9537    
9538          public org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection getDefaultInstanceForType() {
9539            return org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.getDefaultInstance();
9540          }
9541    
9542          public org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection build() {
9543            org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection result = buildPartial();
9544            if (!result.isInitialized()) {
9545              throw newUninitializedMessageException(result);
9546            }
9547            return result;
9548          }
9549    
9550          public org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection buildPartial() {
9551            org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection result = new org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection(this);
9552            onBuilt();
9553            return result;
9554          }
9555    
9556          public Builder mergeFrom(com.google.protobuf.Message other) {
9557            if (other instanceof org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection) {
9558              return mergeFrom((org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection)other);
9559            } else {
9560              super.mergeFrom(other);
9561              return this;
9562            }
9563          }
9564    
9565          public Builder mergeFrom(org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection other) {
9566            if (other == org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.getDefaultInstance()) return this;
9567            this.mergeUnknownFields(other.getUnknownFields());
9568            return this;
9569          }
9570    
9571          public final boolean isInitialized() {
9572            return true;
9573          }
9574    
9575          public Builder mergeFrom(
9576              com.google.protobuf.CodedInputStream input,
9577              com.google.protobuf.ExtensionRegistryLite extensionRegistry)
9578              throws java.io.IOException {
9579            org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection parsedMessage = null;
9580            try {
9581              parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
9582            } catch (com.google.protobuf.InvalidProtocolBufferException e) {
9583              parsedMessage = (org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection) e.getUnfinishedMessage();
9584              throw e;
9585            } finally {
9586              if (parsedMessage != null) {
9587                mergeFrom(parsedMessage);
9588              }
9589            }
9590            return this;
9591          }
9592    
9593          // @@protoc_insertion_point(builder_scope:hadoop.hdfs.fsimage.FilesUnderConstructionSection)
9594        }
9595    
    // Create and initialize the shared default instance.
    static {
      defaultInstance = new FilesUnderConstructionSection(true);
      defaultInstance.initFields();
    }
9600    
9601        // @@protoc_insertion_point(class_scope:hadoop.hdfs.fsimage.FilesUnderConstructionSection)
9602      }
9603    
  // INodeDirectorySection declares no fields of its own, so its OrBuilder
  // interface is an empty marker extending MessageOrBuilder.
  public interface INodeDirectorySectionOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
  }
9607      /**
9608       * Protobuf type {@code hadoop.hdfs.fsimage.INodeDirectorySection}
9609       *
9610       * <pre>
9611       **
9612       * This section records the children of each directories
9613       * NAME: INODE_DIR
9614       * </pre>
9615       */
9616      public static final class INodeDirectorySection extends
9617          com.google.protobuf.GeneratedMessage
9618          implements INodeDirectorySectionOrBuilder {
    // Use INodeDirectorySection.newBuilder() to construct.
    private INodeDirectorySection(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // noInit constructor: skips stream parsing and installs an empty
    // unknown-field set — presumably used for the static default instance,
    // matching the pattern of the sibling section classes.
    private INodeDirectorySection(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
9625    
    // Shared singleton default instance, assigned in this class's static
    // initializer.
    private static final INodeDirectorySection defaultInstance;
    public static INodeDirectorySection getDefaultInstance() {
      return defaultInstance;
    }

    public INodeDirectorySection getDefaultInstanceForType() {
      return defaultInstance;
    }
9634    
    // Unrecognized wire-format fields retained from parsing so that
    // reserialization does not drop them.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor. This message defines no fields of
    // its own, so every non-zero tag is routed to the unknown-field set;
    // tag 0 signals end of input.
    private INodeDirectorySection(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        // Wrap plain I/O failures so callers see a protobuf parse error
        // that still carries the partially-built message.
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Always freeze whatever was collected, even on error paths.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeDirectorySection_descriptor;
    }

    // Reflection support: binds the shared descriptor to this class and
    // its Builder.
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeDirectorySection_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.Builder.class);
    }
9686    
9687        public static com.google.protobuf.Parser<INodeDirectorySection> PARSER =
9688            new com.google.protobuf.AbstractParser<INodeDirectorySection>() {
9689          public INodeDirectorySection parsePartialFrom(
9690              com.google.protobuf.CodedInputStream input,
9691              com.google.protobuf.ExtensionRegistryLite extensionRegistry)
9692              throws com.google.protobuf.InvalidProtocolBufferException {
9693            return new INodeDirectorySection(input, extensionRegistry);
9694          }
9695        };
9696    
    // Instance-level hook returning the shared PARSER singleton.
    @java.lang.Override
    public com.google.protobuf.Parser<INodeDirectorySection> getParserForType() {
      return PARSER;
    }
9701    
9702        public interface DirEntryOrBuilder
9703            extends com.google.protobuf.MessageOrBuilder {
9704    
9705          // optional uint64 parent = 1;
9706          /**
9707           * <code>optional uint64 parent = 1;</code>
9708           */
9709          boolean hasParent();
9710          /**
9711           * <code>optional uint64 parent = 1;</code>
9712           */
9713          long getParent();
9714    
9715          // repeated uint64 children = 2 [packed = true];
9716          /**
9717           * <code>repeated uint64 children = 2 [packed = true];</code>
9718           *
9719           * <pre>
9720           * children that are not reference nodes
9721           * </pre>
9722           */
9723          java.util.List<java.lang.Long> getChildrenList();
9724          /**
9725           * <code>repeated uint64 children = 2 [packed = true];</code>
9726           *
9727           * <pre>
9728           * children that are not reference nodes
9729           * </pre>
9730           */
9731          int getChildrenCount();
9732          /**
9733           * <code>repeated uint64 children = 2 [packed = true];</code>
9734           *
9735           * <pre>
9736           * children that are not reference nodes
9737           * </pre>
9738           */
9739          long getChildren(int index);
9740    
9741          // repeated uint32 refChildren = 3 [packed = true];
9742          /**
9743           * <code>repeated uint32 refChildren = 3 [packed = true];</code>
9744           *
9745           * <pre>
9746           * children that are reference nodes, each element is a reference node id
9747           * </pre>
9748           */
9749          java.util.List<java.lang.Integer> getRefChildrenList();
9750          /**
9751           * <code>repeated uint32 refChildren = 3 [packed = true];</code>
9752           *
9753           * <pre>
9754           * children that are reference nodes, each element is a reference node id
9755           * </pre>
9756           */
9757          int getRefChildrenCount();
9758          /**
9759           * <code>repeated uint32 refChildren = 3 [packed = true];</code>
9760           *
9761           * <pre>
9762           * children that are reference nodes, each element is a reference node id
9763           * </pre>
9764           */
9765          int getRefChildren(int index);
9766        }
9767        /**
9768         * Protobuf type {@code hadoop.hdfs.fsimage.INodeDirectorySection.DirEntry}
9769         *
9770         * <pre>
9771         **
9772         * A single DirEntry needs to fit in the default PB max message size of
9773         * 64MB. Please be careful when adding more fields to a DirEntry!
9774         * </pre>
9775         */
9776        public static final class DirEntry extends
9777            com.google.protobuf.GeneratedMessage
9778            implements DirEntryOrBuilder {
9779          // Use DirEntry.newBuilder() to construct.
9780          private DirEntry(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
9781            super(builder);
9782            this.unknownFields = builder.getUnknownFields();
9783          }
9784          private DirEntry(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
9785    
9786          private static final DirEntry defaultInstance;
9787          public static DirEntry getDefaultInstance() {
9788            return defaultInstance;
9789          }
9790    
9791          public DirEntry getDefaultInstanceForType() {
9792            return defaultInstance;
9793          }
9794    
9795          private final com.google.protobuf.UnknownFieldSet unknownFields;
9796          @java.lang.Override
9797          public final com.google.protobuf.UnknownFieldSet
9798              getUnknownFields() {
9799            return this.unknownFields;
9800          }
9801          private DirEntry(
9802              com.google.protobuf.CodedInputStream input,
9803              com.google.protobuf.ExtensionRegistryLite extensionRegistry)
9804              throws com.google.protobuf.InvalidProtocolBufferException {
9805            initFields();
9806            int mutable_bitField0_ = 0;
9807            com.google.protobuf.UnknownFieldSet.Builder unknownFields =
9808                com.google.protobuf.UnknownFieldSet.newBuilder();
9809            try {
9810              boolean done = false;
9811              while (!done) {
9812                int tag = input.readTag();
9813                switch (tag) {
9814                  case 0:
9815                    done = true;
9816                    break;
9817                  default: {
9818                    if (!parseUnknownField(input, unknownFields,
9819                                           extensionRegistry, tag)) {
9820                      done = true;
9821                    }
9822                    break;
9823                  }
9824                  case 8: {
9825                    bitField0_ |= 0x00000001;
9826                    parent_ = input.readUInt64();
9827                    break;
9828                  }
9829                  case 16: {
9830                    if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
9831                      children_ = new java.util.ArrayList<java.lang.Long>();
9832                      mutable_bitField0_ |= 0x00000002;
9833                    }
9834                    children_.add(input.readUInt64());
9835                    break;
9836                  }
9837                  case 18: {
9838                    int length = input.readRawVarint32();
9839                    int limit = input.pushLimit(length);
9840                    if (!((mutable_bitField0_ & 0x00000002) == 0x00000002) && input.getBytesUntilLimit() > 0) {
9841                      children_ = new java.util.ArrayList<java.lang.Long>();
9842                      mutable_bitField0_ |= 0x00000002;
9843                    }
9844                    while (input.getBytesUntilLimit() > 0) {
9845                      children_.add(input.readUInt64());
9846                    }
9847                    input.popLimit(limit);
9848                    break;
9849                  }
9850                  case 24: {
9851                    if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) {
9852                      refChildren_ = new java.util.ArrayList<java.lang.Integer>();
9853                      mutable_bitField0_ |= 0x00000004;
9854                    }
9855                    refChildren_.add(input.readUInt32());
9856                    break;
9857                  }
9858                  case 26: {
9859                    int length = input.readRawVarint32();
9860                    int limit = input.pushLimit(length);
9861                    if (!((mutable_bitField0_ & 0x00000004) == 0x00000004) && input.getBytesUntilLimit() > 0) {
9862                      refChildren_ = new java.util.ArrayList<java.lang.Integer>();
9863                      mutable_bitField0_ |= 0x00000004;
9864                    }
9865                    while (input.getBytesUntilLimit() > 0) {
9866                      refChildren_.add(input.readUInt32());
9867                    }
9868                    input.popLimit(limit);
9869                    break;
9870                  }
9871                }
9872              }
9873            } catch (com.google.protobuf.InvalidProtocolBufferException e) {
9874              throw e.setUnfinishedMessage(this);
9875            } catch (java.io.IOException e) {
9876              throw new com.google.protobuf.InvalidProtocolBufferException(
9877                  e.getMessage()).setUnfinishedMessage(this);
9878            } finally {
9879              if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
9880                children_ = java.util.Collections.unmodifiableList(children_);
9881              }
9882              if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) {
9883                refChildren_ = java.util.Collections.unmodifiableList(refChildren_);
9884              }
9885              this.unknownFields = unknownFields.build();
9886              makeExtensionsImmutable();
9887            }
9888          }
9889          public static final com.google.protobuf.Descriptors.Descriptor
9890              getDescriptor() {
9891            return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeDirectorySection_DirEntry_descriptor;
9892          }
9893    
9894          protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
9895              internalGetFieldAccessorTable() {
9896            return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeDirectorySection_DirEntry_fieldAccessorTable
9897                .ensureFieldAccessorsInitialized(
9898                    org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.DirEntry.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.DirEntry.Builder.class);
9899          }
9900    
9901          public static com.google.protobuf.Parser<DirEntry> PARSER =
9902              new com.google.protobuf.AbstractParser<DirEntry>() {
9903            public DirEntry parsePartialFrom(
9904                com.google.protobuf.CodedInputStream input,
9905                com.google.protobuf.ExtensionRegistryLite extensionRegistry)
9906                throws com.google.protobuf.InvalidProtocolBufferException {
9907              return new DirEntry(input, extensionRegistry);
9908            }
9909          };
9910    
9911          @java.lang.Override
9912          public com.google.protobuf.Parser<DirEntry> getParserForType() {
9913            return PARSER;
9914          }
9915    
9916          private int bitField0_;
9917          // optional uint64 parent = 1;
9918          public static final int PARENT_FIELD_NUMBER = 1;
9919          private long parent_;
9920          /**
9921           * <code>optional uint64 parent = 1;</code>
9922           */
9923          public boolean hasParent() {
9924            return ((bitField0_ & 0x00000001) == 0x00000001);
9925          }
9926          /**
9927           * <code>optional uint64 parent = 1;</code>
9928           */
9929          public long getParent() {
9930            return parent_;
9931          }
9932    
9933          // repeated uint64 children = 2 [packed = true];
9934          public static final int CHILDREN_FIELD_NUMBER = 2;
9935          private java.util.List<java.lang.Long> children_;
9936          /**
9937           * <code>repeated uint64 children = 2 [packed = true];</code>
9938           *
9939           * <pre>
9940           * children that are not reference nodes
9941           * </pre>
9942           */
9943          public java.util.List<java.lang.Long>
9944              getChildrenList() {
9945            return children_;
9946          }
9947          /**
9948           * <code>repeated uint64 children = 2 [packed = true];</code>
9949           *
9950           * <pre>
9951           * children that are not reference nodes
9952           * </pre>
9953           */
9954          public int getChildrenCount() {
9955            return children_.size();
9956          }
9957          /**
9958           * <code>repeated uint64 children = 2 [packed = true];</code>
9959           *
9960           * <pre>
9961           * children that are not reference nodes
9962           * </pre>
9963           */
9964          public long getChildren(int index) {
9965            return children_.get(index);
9966          }
9967          private int childrenMemoizedSerializedSize = -1;
9968    
9969          // repeated uint32 refChildren = 3 [packed = true];
9970          public static final int REFCHILDREN_FIELD_NUMBER = 3;
9971          private java.util.List<java.lang.Integer> refChildren_;
9972          /**
9973           * <code>repeated uint32 refChildren = 3 [packed = true];</code>
9974           *
9975           * <pre>
9976           * children that are reference nodes, each element is a reference node id
9977           * </pre>
9978           */
9979          public java.util.List<java.lang.Integer>
9980              getRefChildrenList() {
9981            return refChildren_;
9982          }
9983          /**
9984           * <code>repeated uint32 refChildren = 3 [packed = true];</code>
9985           *
9986           * <pre>
9987           * children that are reference nodes, each element is a reference node id
9988           * </pre>
9989           */
9990          public int getRefChildrenCount() {
9991            return refChildren_.size();
9992          }
9993          /**
9994           * <code>repeated uint32 refChildren = 3 [packed = true];</code>
9995           *
9996           * <pre>
9997           * children that are reference nodes, each element is a reference node id
9998           * </pre>
9999           */
10000          public int getRefChildren(int index) {
10001            return refChildren_.get(index);
10002          }
10003          private int refChildrenMemoizedSerializedSize = -1;
10004    
10005          private void initFields() {
10006            parent_ = 0L;
10007            children_ = java.util.Collections.emptyList();
10008            refChildren_ = java.util.Collections.emptyList();
10009          }
10010          private byte memoizedIsInitialized = -1;
10011          public final boolean isInitialized() {
10012            byte isInitialized = memoizedIsInitialized;
10013            if (isInitialized != -1) return isInitialized == 1;
10014    
10015            memoizedIsInitialized = 1;
10016            return true;
10017          }
10018    
10019          public void writeTo(com.google.protobuf.CodedOutputStream output)
10020                              throws java.io.IOException {
10021            getSerializedSize();
10022            if (((bitField0_ & 0x00000001) == 0x00000001)) {
10023              output.writeUInt64(1, parent_);
10024            }
10025            if (getChildrenList().size() > 0) {
10026              output.writeRawVarint32(18);
10027              output.writeRawVarint32(childrenMemoizedSerializedSize);
10028            }
10029            for (int i = 0; i < children_.size(); i++) {
10030              output.writeUInt64NoTag(children_.get(i));
10031            }
10032            if (getRefChildrenList().size() > 0) {
10033              output.writeRawVarint32(26);
10034              output.writeRawVarint32(refChildrenMemoizedSerializedSize);
10035            }
10036            for (int i = 0; i < refChildren_.size(); i++) {
10037              output.writeUInt32NoTag(refChildren_.get(i));
10038            }
10039            getUnknownFields().writeTo(output);
10040          }
10041    
10042          private int memoizedSerializedSize = -1;
10043          public int getSerializedSize() {
10044            int size = memoizedSerializedSize;
10045            if (size != -1) return size;
10046    
10047            size = 0;
10048            if (((bitField0_ & 0x00000001) == 0x00000001)) {
10049              size += com.google.protobuf.CodedOutputStream
10050                .computeUInt64Size(1, parent_);
10051            }
10052            {
10053              int dataSize = 0;
10054              for (int i = 0; i < children_.size(); i++) {
10055                dataSize += com.google.protobuf.CodedOutputStream
10056                  .computeUInt64SizeNoTag(children_.get(i));
10057              }
10058              size += dataSize;
10059              if (!getChildrenList().isEmpty()) {
10060                size += 1;
10061                size += com.google.protobuf.CodedOutputStream
10062                    .computeInt32SizeNoTag(dataSize);
10063              }
10064              childrenMemoizedSerializedSize = dataSize;
10065            }
10066            {
10067              int dataSize = 0;
10068              for (int i = 0; i < refChildren_.size(); i++) {
10069                dataSize += com.google.protobuf.CodedOutputStream
10070                  .computeUInt32SizeNoTag(refChildren_.get(i));
10071              }
10072              size += dataSize;
10073              if (!getRefChildrenList().isEmpty()) {
10074                size += 1;
10075                size += com.google.protobuf.CodedOutputStream
10076                    .computeInt32SizeNoTag(dataSize);
10077              }
10078              refChildrenMemoizedSerializedSize = dataSize;
10079            }
10080            size += getUnknownFields().getSerializedSize();
10081            memoizedSerializedSize = size;
10082            return size;
10083          }
10084    
10085          private static final long serialVersionUID = 0L;
10086          @java.lang.Override
10087          protected java.lang.Object writeReplace()
10088              throws java.io.ObjectStreamException {
10089            return super.writeReplace();
10090          }
10091    
10092          public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.DirEntry parseFrom(
10093              com.google.protobuf.ByteString data)
10094              throws com.google.protobuf.InvalidProtocolBufferException {
10095            return PARSER.parseFrom(data);
10096          }
10097          public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.DirEntry parseFrom(
10098              com.google.protobuf.ByteString data,
10099              com.google.protobuf.ExtensionRegistryLite extensionRegistry)
10100              throws com.google.protobuf.InvalidProtocolBufferException {
10101            return PARSER.parseFrom(data, extensionRegistry);
10102          }
10103          public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.DirEntry parseFrom(byte[] data)
10104              throws com.google.protobuf.InvalidProtocolBufferException {
10105            return PARSER.parseFrom(data);
10106          }
10107          public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.DirEntry parseFrom(
10108              byte[] data,
10109              com.google.protobuf.ExtensionRegistryLite extensionRegistry)
10110              throws com.google.protobuf.InvalidProtocolBufferException {
10111            return PARSER.parseFrom(data, extensionRegistry);
10112          }
10113          public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.DirEntry parseFrom(java.io.InputStream input)
10114              throws java.io.IOException {
10115            return PARSER.parseFrom(input);
10116          }
10117          public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.DirEntry parseFrom(
10118              java.io.InputStream input,
10119              com.google.protobuf.ExtensionRegistryLite extensionRegistry)
10120              throws java.io.IOException {
10121            return PARSER.parseFrom(input, extensionRegistry);
10122          }
10123          public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.DirEntry parseDelimitedFrom(java.io.InputStream input)
10124              throws java.io.IOException {
10125            return PARSER.parseDelimitedFrom(input);
10126          }
10127          public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.DirEntry parseDelimitedFrom(
10128              java.io.InputStream input,
10129              com.google.protobuf.ExtensionRegistryLite extensionRegistry)
10130              throws java.io.IOException {
10131            return PARSER.parseDelimitedFrom(input, extensionRegistry);
10132          }
10133          public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.DirEntry parseFrom(
10134              com.google.protobuf.CodedInputStream input)
10135              throws java.io.IOException {
10136            return PARSER.parseFrom(input);
10137          }
10138          public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.DirEntry parseFrom(
10139              com.google.protobuf.CodedInputStream input,
10140              com.google.protobuf.ExtensionRegistryLite extensionRegistry)
10141              throws java.io.IOException {
10142            return PARSER.parseFrom(input, extensionRegistry);
10143          }
10144    
10145          public static Builder newBuilder() { return Builder.create(); }
10146          public Builder newBuilderForType() { return newBuilder(); }
10147          public static Builder newBuilder(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.DirEntry prototype) {
10148            return newBuilder().mergeFrom(prototype);
10149          }
10150          public Builder toBuilder() { return newBuilder(this); }
10151    
10152          @java.lang.Override
10153          protected Builder newBuilderForType(
10154              com.google.protobuf.GeneratedMessage.BuilderParent parent) {
10155            Builder builder = new Builder(parent);
10156            return builder;
10157          }
10158          /**
10159           * Protobuf type {@code hadoop.hdfs.fsimage.INodeDirectorySection.DirEntry}
10160           *
10161           * <pre>
10162           **
10163           * A single DirEntry needs to fit in the default PB max message size of
10164           * 64MB. Please be careful when adding more fields to a DirEntry!
10165           * </pre>
10166           */
10167          public static final class Builder extends
10168              com.google.protobuf.GeneratedMessage.Builder<Builder>
10169             implements org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.DirEntryOrBuilder {
10170            public static final com.google.protobuf.Descriptors.Descriptor
10171                getDescriptor() {
10172              return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeDirectorySection_DirEntry_descriptor;
10173            }
10174    
10175            protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
10176                internalGetFieldAccessorTable() {
10177              return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeDirectorySection_DirEntry_fieldAccessorTable
10178                  .ensureFieldAccessorsInitialized(
10179                      org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.DirEntry.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.DirEntry.Builder.class);
10180            }
10181    
10182            // Construct using org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.DirEntry.newBuilder()
10183            private Builder() {
10184              maybeForceBuilderInitialization();
10185            }
10186    
10187            private Builder(
10188                com.google.protobuf.GeneratedMessage.BuilderParent parent) {
10189              super(parent);
10190              maybeForceBuilderInitialization();
10191            }
10192            private void maybeForceBuilderInitialization() {
10193              if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
10194              }
10195            }
10196            private static Builder create() {
10197              return new Builder();
10198            }
10199    
10200            public Builder clear() {
10201              super.clear();
10202              parent_ = 0L;
10203              bitField0_ = (bitField0_ & ~0x00000001);
10204              children_ = java.util.Collections.emptyList();
10205              bitField0_ = (bitField0_ & ~0x00000002);
10206              refChildren_ = java.util.Collections.emptyList();
10207              bitField0_ = (bitField0_ & ~0x00000004);
10208              return this;
10209            }
10210    
10211            public Builder clone() {
10212              return create().mergeFrom(buildPartial());
10213            }
10214    
10215            public com.google.protobuf.Descriptors.Descriptor
10216                getDescriptorForType() {
10217              return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeDirectorySection_DirEntry_descriptor;
10218            }
10219    
10220            public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.DirEntry getDefaultInstanceForType() {
10221              return org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.DirEntry.getDefaultInstance();
10222            }
10223    
10224            public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.DirEntry build() {
10225              org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.DirEntry result = buildPartial();
10226              if (!result.isInitialized()) {
10227                throw newUninitializedMessageException(result);
10228              }
10229              return result;
10230            }
10231    
10232            public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.DirEntry buildPartial() {
10233              org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.DirEntry result = new org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.DirEntry(this);
10234              int from_bitField0_ = bitField0_;
10235              int to_bitField0_ = 0;
10236              if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
10237                to_bitField0_ |= 0x00000001;
10238              }
10239              result.parent_ = parent_;
10240              if (((bitField0_ & 0x00000002) == 0x00000002)) {
10241                children_ = java.util.Collections.unmodifiableList(children_);
10242                bitField0_ = (bitField0_ & ~0x00000002);
10243              }
10244              result.children_ = children_;
10245              if (((bitField0_ & 0x00000004) == 0x00000004)) {
10246                refChildren_ = java.util.Collections.unmodifiableList(refChildren_);
10247                bitField0_ = (bitField0_ & ~0x00000004);
10248              }
10249              result.refChildren_ = refChildren_;
10250              result.bitField0_ = to_bitField0_;
10251              onBuilt();
10252              return result;
10253            }
10254    
10255            public Builder mergeFrom(com.google.protobuf.Message other) {
10256              if (other instanceof org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.DirEntry) {
10257                return mergeFrom((org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.DirEntry)other);
10258              } else {
10259                super.mergeFrom(other);
10260                return this;
10261              }
10262            }
10263    
10264            public Builder mergeFrom(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.DirEntry other) {
10265              if (other == org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.DirEntry.getDefaultInstance()) return this;
10266              if (other.hasParent()) {
10267                setParent(other.getParent());
10268              }
10269              if (!other.children_.isEmpty()) {
10270                if (children_.isEmpty()) {
10271                  children_ = other.children_;
10272                  bitField0_ = (bitField0_ & ~0x00000002);
10273                } else {
10274                  ensureChildrenIsMutable();
10275                  children_.addAll(other.children_);
10276                }
10277                onChanged();
10278              }
10279              if (!other.refChildren_.isEmpty()) {
10280                if (refChildren_.isEmpty()) {
10281                  refChildren_ = other.refChildren_;
10282                  bitField0_ = (bitField0_ & ~0x00000004);
10283                } else {
10284                  ensureRefChildrenIsMutable();
10285                  refChildren_.addAll(other.refChildren_);
10286                }
10287                onChanged();
10288              }
10289              this.mergeUnknownFields(other.getUnknownFields());
10290              return this;
10291            }
10292    
10293            public final boolean isInitialized() {
10294              return true;
10295            }
10296    
10297            public Builder mergeFrom(
10298                com.google.protobuf.CodedInputStream input,
10299                com.google.protobuf.ExtensionRegistryLite extensionRegistry)
10300                throws java.io.IOException {
10301              org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.DirEntry parsedMessage = null;
10302              try {
10303                parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
10304              } catch (com.google.protobuf.InvalidProtocolBufferException e) {
10305                parsedMessage = (org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.DirEntry) e.getUnfinishedMessage();
10306                throw e;
10307              } finally {
10308                if (parsedMessage != null) {
10309                  mergeFrom(parsedMessage);
10310                }
10311              }
10312              return this;
10313            }
10314            private int bitField0_;
10315    
10316            // optional uint64 parent = 1;
10317            private long parent_ ;
10318            /**
10319             * <code>optional uint64 parent = 1;</code>
10320             */
10321            public boolean hasParent() {
10322              return ((bitField0_ & 0x00000001) == 0x00000001);
10323            }
10324            /**
10325             * <code>optional uint64 parent = 1;</code>
10326             */
10327            public long getParent() {
10328              return parent_;
10329            }
10330            /**
10331             * <code>optional uint64 parent = 1;</code>
10332             */
10333            public Builder setParent(long value) {
10334              bitField0_ |= 0x00000001;
10335              parent_ = value;
10336              onChanged();
10337              return this;
10338            }
10339            /**
10340             * <code>optional uint64 parent = 1;</code>
10341             */
10342            public Builder clearParent() {
10343              bitField0_ = (bitField0_ & ~0x00000001);
10344              parent_ = 0L;
10345              onChanged();
10346              return this;
10347            }
10348    
        // repeated uint64 children = 2 [packed = true];
        // Copy-on-write backing list: starts as the shared immutable empty
        // list; bit 0x00000002 of bitField0_ records whether this builder
        // owns a private mutable copy.
        private java.util.List<java.lang.Long> children_ = java.util.Collections.emptyList();
        private void ensureChildrenIsMutable() {
          if (!((bitField0_ & 0x00000002) == 0x00000002)) {
            // First mutation: replace the shared list with a private copy.
            children_ = new java.util.ArrayList<java.lang.Long>(children_);
            bitField0_ |= 0x00000002;
           }
        }
        /**
         * <code>repeated uint64 children = 2 [packed = true];</code>
         *
         * <pre>
         * children that are not reference nodes
         * </pre>
         */
        public java.util.List<java.lang.Long>
            getChildrenList() {
          // Unmodifiable view so callers cannot bypass ensureChildrenIsMutable().
          return java.util.Collections.unmodifiableList(children_);
        }
        /**
         * <code>repeated uint64 children = 2 [packed = true];</code>
         *
         * <pre>
         * children that are not reference nodes
         * </pre>
         */
        public int getChildrenCount() {
          return children_.size();
        }
        /**
         * <code>repeated uint64 children = 2 [packed = true];</code>
         *
         * <pre>
         * children that are not reference nodes
         * </pre>
         */
        public long getChildren(int index) {
          return children_.get(index);
        }
        /**
         * <code>repeated uint64 children = 2 [packed = true];</code>
         *
         * <pre>
         * children that are not reference nodes
         * </pre>
         */
        public Builder setChildren(
            int index, long value) {
          ensureChildrenIsMutable();
          children_.set(index, value);
          onChanged();
          return this;
        }
        /**
         * <code>repeated uint64 children = 2 [packed = true];</code>
         *
         * <pre>
         * children that are not reference nodes
         * </pre>
         */
        public Builder addChildren(long value) {
          ensureChildrenIsMutable();
          children_.add(value);
          onChanged();
          return this;
        }
        /**
         * <code>repeated uint64 children = 2 [packed = true];</code>
         *
         * <pre>
         * children that are not reference nodes
         * </pre>
         */
        public Builder addAllChildren(
            java.lang.Iterable<? extends java.lang.Long> values) {
          ensureChildrenIsMutable();
          super.addAll(values, children_);
          onChanged();
          return this;
        }
        /**
         * <code>repeated uint64 children = 2 [packed = true];</code>
         *
         * <pre>
         * children that are not reference nodes
         * </pre>
         */
        public Builder clearChildren() {
          // Reset to the shared empty list and drop the ownership bit.
          children_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000002);
          onChanged();
          return this;
        }
10442    
        // repeated uint32 refChildren = 3 [packed = true];
        // Copy-on-write backing list (same scheme as children_); bit
        // 0x00000004 of bitField0_ records private ownership.
        private java.util.List<java.lang.Integer> refChildren_ = java.util.Collections.emptyList();
        private void ensureRefChildrenIsMutable() {
          if (!((bitField0_ & 0x00000004) == 0x00000004)) {
            // First mutation: replace the shared list with a private copy.
            refChildren_ = new java.util.ArrayList<java.lang.Integer>(refChildren_);
            bitField0_ |= 0x00000004;
           }
        }
        /**
         * <code>repeated uint32 refChildren = 3 [packed = true];</code>
         *
         * <pre>
         * children that are reference nodes, each element is a reference node id
         * </pre>
         */
        public java.util.List<java.lang.Integer>
            getRefChildrenList() {
          // Unmodifiable view so callers cannot bypass ensureRefChildrenIsMutable().
          return java.util.Collections.unmodifiableList(refChildren_);
        }
        /**
         * <code>repeated uint32 refChildren = 3 [packed = true];</code>
         *
         * <pre>
         * children that are reference nodes, each element is a reference node id
         * </pre>
         */
        public int getRefChildrenCount() {
          return refChildren_.size();
        }
        /**
         * <code>repeated uint32 refChildren = 3 [packed = true];</code>
         *
         * <pre>
         * children that are reference nodes, each element is a reference node id
         * </pre>
         */
        public int getRefChildren(int index) {
          return refChildren_.get(index);
        }
        /**
         * <code>repeated uint32 refChildren = 3 [packed = true];</code>
         *
         * <pre>
         * children that are reference nodes, each element is a reference node id
         * </pre>
         */
        public Builder setRefChildren(
            int index, int value) {
          ensureRefChildrenIsMutable();
          refChildren_.set(index, value);
          onChanged();
          return this;
        }
        /**
         * <code>repeated uint32 refChildren = 3 [packed = true];</code>
         *
         * <pre>
         * children that are reference nodes, each element is a reference node id
         * </pre>
         */
        public Builder addRefChildren(int value) {
          ensureRefChildrenIsMutable();
          refChildren_.add(value);
          onChanged();
          return this;
        }
        /**
         * <code>repeated uint32 refChildren = 3 [packed = true];</code>
         *
         * <pre>
         * children that are reference nodes, each element is a reference node id
         * </pre>
         */
        public Builder addAllRefChildren(
            java.lang.Iterable<? extends java.lang.Integer> values) {
          ensureRefChildrenIsMutable();
          super.addAll(values, refChildren_);
          onChanged();
          return this;
        }
        /**
         * <code>repeated uint32 refChildren = 3 [packed = true];</code>
         *
         * <pre>
         * children that are reference nodes, each element is a reference node id
         * </pre>
         */
        public Builder clearRefChildren() {
          // Reset to the shared empty list and drop the ownership bit.
          refChildren_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000004);
          onChanged();
          return this;
        }
10536    
10537            // @@protoc_insertion_point(builder_scope:hadoop.hdfs.fsimage.INodeDirectorySection.DirEntry)
10538          }
10539    
      static {
        // Create and initialize the shared default instance for DirEntry.
        defaultInstance = new DirEntry(true);
        defaultInstance.initFields();
      }
10544    
10545          // @@protoc_insertion_point(class_scope:hadoop.hdfs.fsimage.INodeDirectorySection.DirEntry)
10546        }
10547    
    private void initFields() {
      // This message declares no fields, so there is nothing to reset.
    }
    // Memoized initialization check: -1 = not computed, 0 = false, 1 = true.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      // No required fields, so the message is always initialized.
      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();  // ensure the memoized size is computed first
      // Only unknown fields can carry data for this field-less message.
      getUnknownFields().writeTo(output);
    }

    // Memoized wire size; -1 until first computed by getSerializedSize().
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      // Delegate Java serialization to the GeneratedMessage superclass.
      return super.writeReplace();
    }
10582    
    // Static parsing entry points; all overloads delegate to PARSER.
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    // Delimited variants read a leading varint length before the message.
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
10635    
    // Builder factory methods.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection prototype) {
      // New builder pre-populated from an existing message.
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      // Nested-builder variant used internally by the protobuf runtime.
      Builder builder = new Builder(parent);
      return builder;
    }
10649        /**
10650         * Protobuf type {@code hadoop.hdfs.fsimage.INodeDirectorySection}
10651         *
10652         * <pre>
10653         **
10654         * This section records the children of each directories
10655         * NAME: INODE_DIR
10656         * </pre>
10657         */
10658        public static final class Builder extends
10659            com.google.protobuf.GeneratedMessage.Builder<Builder>
10660           implements org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySectionOrBuilder {
10661          public static final com.google.protobuf.Descriptors.Descriptor
10662              getDescriptor() {
10663            return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeDirectorySection_descriptor;
10664          }
10665    
10666          protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
10667              internalGetFieldAccessorTable() {
10668            return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeDirectorySection_fieldAccessorTable
10669                .ensureFieldAccessorsInitialized(
10670                    org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.Builder.class);
10671          }
10672    
10673          // Construct using org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.newBuilder()
10674          private Builder() {
10675            maybeForceBuilderInitialization();
10676          }
10677    
10678          private Builder(
10679              com.google.protobuf.GeneratedMessage.BuilderParent parent) {
10680            super(parent);
10681            maybeForceBuilderInitialization();
10682          }
10683          private void maybeForceBuilderInitialization() {
10684            if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
10685            }
10686          }
10687          private static Builder create() {
10688            return new Builder();
10689          }
10690    
10691          public Builder clear() {
10692            super.clear();
10693            return this;
10694          }
10695    
10696          public Builder clone() {
10697            return create().mergeFrom(buildPartial());
10698          }
10699    
10700          public com.google.protobuf.Descriptors.Descriptor
10701              getDescriptorForType() {
10702            return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeDirectorySection_descriptor;
10703          }
10704    
10705          public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection getDefaultInstanceForType() {
10706            return org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.getDefaultInstance();
10707          }
10708    
10709          public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection build() {
10710            org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection result = buildPartial();
10711            if (!result.isInitialized()) {
10712              throw newUninitializedMessageException(result);
10713            }
10714            return result;
10715          }
10716    
10717          public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection buildPartial() {
10718            org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection result = new org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection(this);
10719            onBuilt();
10720            return result;
10721          }
10722    
10723          public Builder mergeFrom(com.google.protobuf.Message other) {
10724            if (other instanceof org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection) {
10725              return mergeFrom((org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection)other);
10726            } else {
10727              super.mergeFrom(other);
10728              return this;
10729            }
10730          }
10731    
10732          public Builder mergeFrom(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection other) {
10733            if (other == org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.getDefaultInstance()) return this;
10734            this.mergeUnknownFields(other.getUnknownFields());
10735            return this;
10736          }
10737    
10738          public final boolean isInitialized() {
10739            return true;
10740          }
10741    
10742          public Builder mergeFrom(
10743              com.google.protobuf.CodedInputStream input,
10744              com.google.protobuf.ExtensionRegistryLite extensionRegistry)
10745              throws java.io.IOException {
10746            org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection parsedMessage = null;
10747            try {
10748              parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
10749            } catch (com.google.protobuf.InvalidProtocolBufferException e) {
10750              parsedMessage = (org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection) e.getUnfinishedMessage();
10751              throw e;
10752            } finally {
10753              if (parsedMessage != null) {
10754                mergeFrom(parsedMessage);
10755              }
10756            }
10757            return this;
10758          }
10759    
10760          // @@protoc_insertion_point(builder_scope:hadoop.hdfs.fsimage.INodeDirectorySection)
10761        }
10762    
    static {
      // Create and initialize the shared default instance.
      defaultInstance = new INodeDirectorySection(true);
      defaultInstance.initFields();
    }
10767    
10768        // @@protoc_insertion_point(class_scope:hadoop.hdfs.fsimage.INodeDirectorySection)
10769      }
10770    
  /**
   * Read-access contract shared by {@code INodeReferenceSection} and its
   * Builder. The message declares no fields of its own, so no accessors
   * are needed.
   */
  public interface INodeReferenceSectionOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
  }
10774      /**
10775       * Protobuf type {@code hadoop.hdfs.fsimage.INodeReferenceSection}
10776       */
10777      public static final class INodeReferenceSection extends
10778          com.google.protobuf.GeneratedMessage
10779          implements INodeReferenceSectionOrBuilder {
    // Use INodeReferenceSection.newBuilder() to construct.
    private INodeReferenceSection(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Minimal constructor used for the default instance; leaves unknown
    // fields empty.
    private INodeReferenceSection(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    // Shared immutable singleton, assigned by the generated static initializer.
    private static final INodeReferenceSection defaultInstance;
    public static INodeReferenceSection getDefaultInstance() {
      return defaultInstance;
    }

    public INodeReferenceSection getDefaultInstanceForType() {
      return defaultInstance;
    }

    // Fields read from the wire that this message type does not declare.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    private INodeReferenceSection(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              // Tag 0 marks the end of the stream / enclosing message.
              done = true;
              break;
            default: {
              // This message declares no fields; everything read is
              // preserved as an unknown field for round-tripping.
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Attach whatever unknown fields were read, even on error.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeReferenceSection_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      // Reflection support: maps descriptor fields to generated accessors.
      return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeReferenceSection_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.Builder.class);
    }

    // Parser backing the parseFrom() overloads; delegates to the
    // stream-parsing constructor.
    public static com.google.protobuf.Parser<INodeReferenceSection> PARSER =
        new com.google.protobuf.AbstractParser<INodeReferenceSection>() {
      public INodeReferenceSection parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new INodeReferenceSection(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<INodeReferenceSection> getParserForType() {
      return PARSER;
    }
10862    
    /**
     * Read-access contract shared by {@code INodeReference} and its Builder.
     */
    public interface INodeReferenceOrBuilder
        extends com.google.protobuf.MessageOrBuilder {

      // optional uint64 referredId = 1;
      /**
       * <code>optional uint64 referredId = 1;</code>
       *
       * <pre>
       * id of the referred inode
       * </pre>
       */
      boolean hasReferredId();
      /**
       * <code>optional uint64 referredId = 1;</code>
       *
       * <pre>
       * id of the referred inode
       * </pre>
       */
      long getReferredId();

      // optional bytes name = 2;
      /**
       * <code>optional bytes name = 2;</code>
       *
       * <pre>
       * local name recorded in WithName
       * </pre>
       */
      boolean hasName();
      /**
       * <code>optional bytes name = 2;</code>
       *
       * <pre>
       * local name recorded in WithName
       * </pre>
       */
      com.google.protobuf.ByteString getName();

      // optional uint32 dstSnapshotId = 3;
      /**
       * <code>optional uint32 dstSnapshotId = 3;</code>
       *
       * <pre>
       * recorded in DstReference
       * </pre>
       */
      boolean hasDstSnapshotId();
      /**
       * <code>optional uint32 dstSnapshotId = 3;</code>
       *
       * <pre>
       * recorded in DstReference
       * </pre>
       */
      int getDstSnapshotId();

      // optional uint32 lastSnapshotId = 4;
      /**
       * <code>optional uint32 lastSnapshotId = 4;</code>
       *
       * <pre>
       * recorded in WithName
       * </pre>
       */
      boolean hasLastSnapshotId();
      /**
       * <code>optional uint32 lastSnapshotId = 4;</code>
       *
       * <pre>
       * recorded in WithName
       * </pre>
       */
      int getLastSnapshotId();
    }
10938        /**
10939         * Protobuf type {@code hadoop.hdfs.fsimage.INodeReferenceSection.INodeReference}
10940         */
10941        public static final class INodeReference extends
10942            com.google.protobuf.GeneratedMessage
10943            implements INodeReferenceOrBuilder {
      // Use INodeReference.newBuilder() to construct.
      private INodeReference(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
        super(builder);
        this.unknownFields = builder.getUnknownFields();
      }
      // Minimal constructor used for the default instance; leaves unknown
      // fields empty.
      private INodeReference(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

      // Shared immutable singleton, assigned by the generated static initializer.
      private static final INodeReference defaultInstance;
      public static INodeReference getDefaultInstance() {
        return defaultInstance;
      }

      public INodeReference getDefaultInstanceForType() {
        return defaultInstance;
      }

      // Fields read from the wire that this message type does not declare.
      private final com.google.protobuf.UnknownFieldSet unknownFields;
      @java.lang.Override
      public final com.google.protobuf.UnknownFieldSet
          getUnknownFields() {
        return this.unknownFields;
      }
      private INodeReference(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        initFields();
        int mutable_bitField0_ = 0;
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
            com.google.protobuf.UnknownFieldSet.newBuilder();
        try {
          boolean done = false;
          while (!done) {
            // Each tag is (field_number << 3) | wire_type.
            int tag = input.readTag();
            switch (tag) {
              case 0:
                // Tag 0 marks the end of the stream / enclosing message.
                done = true;
                break;
              // NOTE: protoc emits 'default' before the field cases; this is
              // legal Java — specific case labels still take precedence.
              default: {
                if (!parseUnknownField(input, unknownFields,
                                       extensionRegistry, tag)) {
                  done = true;
                }
                break;
              }
              case 8: {
                // Field 1 (referredId), varint.
                bitField0_ |= 0x00000001;
                referredId_ = input.readUInt64();
                break;
              }
              case 18: {
                // Field 2 (name), length-delimited bytes.
                bitField0_ |= 0x00000002;
                name_ = input.readBytes();
                break;
              }
              case 24: {
                // Field 3 (dstSnapshotId), varint.
                bitField0_ |= 0x00000004;
                dstSnapshotId_ = input.readUInt32();
                break;
              }
              case 32: {
                // Field 4 (lastSnapshotId), varint.
                bitField0_ |= 0x00000008;
                lastSnapshotId_ = input.readUInt32();
                break;
              }
            }
          }
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(this);
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(
              e.getMessage()).setUnfinishedMessage(this);
        } finally {
          // Attach whatever unknown fields were read, even on error.
          this.unknownFields = unknownFields.build();
          makeExtensionsImmutable();
        }
      }
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeReferenceSection_INodeReference_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        // Reflection support: maps descriptor fields to generated accessors.
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeReferenceSection_INodeReference_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.INodeReference.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.INodeReference.Builder.class);
      }

      // Parser backing the parseFrom() overloads; delegates to the
      // stream-parsing constructor.
      public static com.google.protobuf.Parser<INodeReference> PARSER =
          new com.google.protobuf.AbstractParser<INodeReference>() {
        public INodeReference parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          return new INodeReference(input, extensionRegistry);
        }
      };

      @java.lang.Override
      public com.google.protobuf.Parser<INodeReference> getParserForType() {
        return PARSER;
      }
11047    
      // Presence bits for the optional fields below:
      // 0x1 = referredId, 0x2 = name, 0x4 = dstSnapshotId, 0x8 = lastSnapshotId.
      private int bitField0_;
      // optional uint64 referredId = 1;
      public static final int REFERREDID_FIELD_NUMBER = 1;
      private long referredId_;
      /**
       * <code>optional uint64 referredId = 1;</code>
       *
       * <pre>
       * id of the referred inode
       * </pre>
       */
      public boolean hasReferredId() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>optional uint64 referredId = 1;</code>
       *
       * <pre>
       * id of the referred inode
       * </pre>
       */
      public long getReferredId() {
        return referredId_;
      }

      // optional bytes name = 2;
      public static final int NAME_FIELD_NUMBER = 2;
      private com.google.protobuf.ByteString name_;
      /**
       * <code>optional bytes name = 2;</code>
       *
       * <pre>
       * local name recorded in WithName
       * </pre>
       */
      public boolean hasName() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>optional bytes name = 2;</code>
       *
       * <pre>
       * local name recorded in WithName
       * </pre>
       */
      public com.google.protobuf.ByteString getName() {
        return name_;
      }

      // optional uint32 dstSnapshotId = 3;
      public static final int DSTSNAPSHOTID_FIELD_NUMBER = 3;
      private int dstSnapshotId_;
      /**
       * <code>optional uint32 dstSnapshotId = 3;</code>
       *
       * <pre>
       * recorded in DstReference
       * </pre>
       */
      public boolean hasDstSnapshotId() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      /**
       * <code>optional uint32 dstSnapshotId = 3;</code>
       *
       * <pre>
       * recorded in DstReference
       * </pre>
       */
      public int getDstSnapshotId() {
        return dstSnapshotId_;
      }

      // optional uint32 lastSnapshotId = 4;
      public static final int LASTSNAPSHOTID_FIELD_NUMBER = 4;
      private int lastSnapshotId_;
      /**
       * <code>optional uint32 lastSnapshotId = 4;</code>
       *
       * <pre>
       * recorded in WithName
       * </pre>
       */
      public boolean hasLastSnapshotId() {
        return ((bitField0_ & 0x00000008) == 0x00000008);
      }
      /**
       * <code>optional uint32 lastSnapshotId = 4;</code>
       *
       * <pre>
       * recorded in WithName
       * </pre>
       */
      public int getLastSnapshotId() {
        return lastSnapshotId_;
      }
11144    
      // Resets every field to its proto default; invoked when building the
      // type's default instance.
      private void initFields() {
        referredId_ = 0L;
        name_ = com.google.protobuf.ByteString.EMPTY;
        dstSnapshotId_ = 0;
        lastSnapshotId_ = 0;
      }
      // Memoized isInitialized() result: -1 = not yet computed, 1 = initialized.
      private byte memoizedIsInitialized = -1;
      public final boolean isInitialized() {
        byte isInitialized = memoizedIsInitialized;
        if (isInitialized != -1) return isInitialized == 1;

        // All four fields are optional, so the message is always initialized.
        memoizedIsInitialized = 1;
        return true;
      }
11159    
      // Serializes only the fields whose presence bit is set, in ascending
      // field-number order, then appends any unknown fields.
      public void writeTo(com.google.protobuf.CodedOutputStream output)
                          throws java.io.IOException {
        getSerializedSize();  // populate the memoized size before writing
        if (((bitField0_ & 0x00000001) == 0x00000001)) {
          output.writeUInt64(1, referredId_);
        }
        if (((bitField0_ & 0x00000002) == 0x00000002)) {
          output.writeBytes(2, name_);
        }
        if (((bitField0_ & 0x00000004) == 0x00000004)) {
          output.writeUInt32(3, dstSnapshotId_);
        }
        if (((bitField0_ & 0x00000008) == 0x00000008)) {
          output.writeUInt32(4, lastSnapshotId_);
        }
        getUnknownFields().writeTo(output);
      }
11177    
      // Memoized wire size in bytes; -1 until first computed below.
      private int memoizedSerializedSize = -1;
      public int getSerializedSize() {
        int size = memoizedSerializedSize;
        if (size != -1) return size;

        // Sum the encoded size of each present field plus unknown fields.
        size = 0;
        if (((bitField0_ & 0x00000001) == 0x00000001)) {
          size += com.google.protobuf.CodedOutputStream
            .computeUInt64Size(1, referredId_);
        }
        if (((bitField0_ & 0x00000002) == 0x00000002)) {
          size += com.google.protobuf.CodedOutputStream
            .computeBytesSize(2, name_);
        }
        if (((bitField0_ & 0x00000004) == 0x00000004)) {
          size += com.google.protobuf.CodedOutputStream
            .computeUInt32Size(3, dstSnapshotId_);
        }
        if (((bitField0_ & 0x00000008) == 0x00000008)) {
          size += com.google.protobuf.CodedOutputStream
            .computeUInt32Size(4, lastSnapshotId_);
        }
        size += getUnknownFields().getSerializedSize();
        memoizedSerializedSize = size;
        return size;
      }
11204    
      private static final long serialVersionUID = 0L;
      // Java serialization is delegated to the superclass's serialized-form
      // replacement object.
      @java.lang.Override
      protected java.lang.Object writeReplace()
          throws java.io.ObjectStreamException {
        return super.writeReplace();
      }
11211    
      // Static parsing entry points; every overload delegates to the shared PARSER.
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.INodeReference parseFrom(
          com.google.protobuf.ByteString data)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.INodeReference parseFrom(
          com.google.protobuf.ByteString data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.INodeReference parseFrom(byte[] data)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.INodeReference parseFrom(
          byte[] data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.INodeReference parseFrom(java.io.InputStream input)
          throws java.io.IOException {
        return PARSER.parseFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.INodeReference parseFrom(
          java.io.InputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseFrom(input, extensionRegistry);
      }
      // Delimited variants read a leading varint length before the message body.
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.INodeReference parseDelimitedFrom(java.io.InputStream input)
          throws java.io.IOException {
        return PARSER.parseDelimitedFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.INodeReference parseDelimitedFrom(
          java.io.InputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseDelimitedFrom(input, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.INodeReference parseFrom(
          com.google.protobuf.CodedInputStream input)
          throws java.io.IOException {
        return PARSER.parseFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.INodeReference parseFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseFrom(input, extensionRegistry);
      }
11264    
      // Builder factory methods: fresh builder, builder seeded from a
      // prototype, and builder seeded from this instance.
      public static Builder newBuilder() { return Builder.create(); }
      public Builder newBuilderForType() { return newBuilder(); }
      public static Builder newBuilder(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.INodeReference prototype) {
        return newBuilder().mergeFrom(prototype);
      }
      public Builder toBuilder() { return newBuilder(this); }

      // Runtime hook: creates a builder attached to a parent for
      // change-notification propagation.
      @java.lang.Override
      protected Builder newBuilderForType(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        Builder builder = new Builder(parent);
        return builder;
      }
11278          /**
11279           * Protobuf type {@code hadoop.hdfs.fsimage.INodeReferenceSection.INodeReference}
11280           */
11281          public static final class Builder extends
11282              com.google.protobuf.GeneratedMessage.Builder<Builder>
11283             implements org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.INodeReferenceOrBuilder {
11284            public static final com.google.protobuf.Descriptors.Descriptor
11285                getDescriptor() {
11286              return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeReferenceSection_INodeReference_descriptor;
11287            }
11288    
11289            protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
11290                internalGetFieldAccessorTable() {
11291              return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeReferenceSection_INodeReference_fieldAccessorTable
11292                  .ensureFieldAccessorsInitialized(
11293                      org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.INodeReference.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.INodeReference.Builder.class);
11294            }
11295    
11296            // Construct using org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.INodeReference.newBuilder()
11297            private Builder() {
11298              maybeForceBuilderInitialization();
11299            }
11300    
11301            private Builder(
11302                com.google.protobuf.GeneratedMessage.BuilderParent parent) {
11303              super(parent);
11304              maybeForceBuilderInitialization();
11305            }
11306            private void maybeForceBuilderInitialization() {
11307              if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
11308              }
11309            }
11310            private static Builder create() {
11311              return new Builder();
11312            }
11313    
11314            public Builder clear() {
11315              super.clear();
11316              referredId_ = 0L;
11317              bitField0_ = (bitField0_ & ~0x00000001);
11318              name_ = com.google.protobuf.ByteString.EMPTY;
11319              bitField0_ = (bitField0_ & ~0x00000002);
11320              dstSnapshotId_ = 0;
11321              bitField0_ = (bitField0_ & ~0x00000004);
11322              lastSnapshotId_ = 0;
11323              bitField0_ = (bitField0_ & ~0x00000008);
11324              return this;
11325            }
11326    
11327            public Builder clone() {
11328              return create().mergeFrom(buildPartial());
11329            }
11330    
11331            public com.google.protobuf.Descriptors.Descriptor
11332                getDescriptorForType() {
11333              return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeReferenceSection_INodeReference_descriptor;
11334            }
11335    
11336            public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.INodeReference getDefaultInstanceForType() {
11337              return org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.INodeReference.getDefaultInstance();
11338            }
11339    
11340            public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.INodeReference build() {
11341              org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.INodeReference result = buildPartial();
11342              if (!result.isInitialized()) {
11343                throw newUninitializedMessageException(result);
11344              }
11345              return result;
11346            }
11347    
11348            public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.INodeReference buildPartial() {
11349              org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.INodeReference result = new org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.INodeReference(this);
11350              int from_bitField0_ = bitField0_;
11351              int to_bitField0_ = 0;
11352              if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
11353                to_bitField0_ |= 0x00000001;
11354              }
11355              result.referredId_ = referredId_;
11356              if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
11357                to_bitField0_ |= 0x00000002;
11358              }
11359              result.name_ = name_;
11360              if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
11361                to_bitField0_ |= 0x00000004;
11362              }
11363              result.dstSnapshotId_ = dstSnapshotId_;
11364              if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
11365                to_bitField0_ |= 0x00000008;
11366              }
11367              result.lastSnapshotId_ = lastSnapshotId_;
11368              result.bitField0_ = to_bitField0_;
11369              onBuilt();
11370              return result;
11371            }
11372    
11373            public Builder mergeFrom(com.google.protobuf.Message other) {
11374              if (other instanceof org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.INodeReference) {
11375                return mergeFrom((org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.INodeReference)other);
11376              } else {
11377                super.mergeFrom(other);
11378                return this;
11379              }
11380            }
11381    
11382            public Builder mergeFrom(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.INodeReference other) {
11383              if (other == org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.INodeReference.getDefaultInstance()) return this;
11384              if (other.hasReferredId()) {
11385                setReferredId(other.getReferredId());
11386              }
11387              if (other.hasName()) {
11388                setName(other.getName());
11389              }
11390              if (other.hasDstSnapshotId()) {
11391                setDstSnapshotId(other.getDstSnapshotId());
11392              }
11393              if (other.hasLastSnapshotId()) {
11394                setLastSnapshotId(other.getLastSnapshotId());
11395              }
11396              this.mergeUnknownFields(other.getUnknownFields());
11397              return this;
11398            }
11399    
11400            public final boolean isInitialized() {
11401              return true;
11402            }
11403    
11404            public Builder mergeFrom(
11405                com.google.protobuf.CodedInputStream input,
11406                com.google.protobuf.ExtensionRegistryLite extensionRegistry)
11407                throws java.io.IOException {
11408              org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.INodeReference parsedMessage = null;
11409              try {
11410                parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
11411              } catch (com.google.protobuf.InvalidProtocolBufferException e) {
11412                parsedMessage = (org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.INodeReference) e.getUnfinishedMessage();
11413                throw e;
11414              } finally {
11415                if (parsedMessage != null) {
11416                  mergeFrom(parsedMessage);
11417                }
11418              }
11419              return this;
11420            }
11421            private int bitField0_;
11422    
11423            // optional uint64 referredId = 1;
11424            private long referredId_ ;
11425            /**
11426             * <code>optional uint64 referredId = 1;</code>
11427             *
11428             * <pre>
11429             * id of the referred inode
11430             * </pre>
11431             */
11432            public boolean hasReferredId() {
11433              return ((bitField0_ & 0x00000001) == 0x00000001);
11434            }
11435            /**
11436             * <code>optional uint64 referredId = 1;</code>
11437             *
11438             * <pre>
11439             * id of the referred inode
11440             * </pre>
11441             */
11442            public long getReferredId() {
11443              return referredId_;
11444            }
11445            /**
11446             * <code>optional uint64 referredId = 1;</code>
11447             *
11448             * <pre>
11449             * id of the referred inode
11450             * </pre>
11451             */
11452            public Builder setReferredId(long value) {
11453              bitField0_ |= 0x00000001;
11454              referredId_ = value;
11455              onChanged();
11456              return this;
11457            }
11458            /**
11459             * <code>optional uint64 referredId = 1;</code>
11460             *
11461             * <pre>
11462             * id of the referred inode
11463             * </pre>
11464             */
11465            public Builder clearReferredId() {
11466              bitField0_ = (bitField0_ & ~0x00000001);
11467              referredId_ = 0L;
11468              onChanged();
11469              return this;
11470            }
11471    
11472            // optional bytes name = 2;
11473            private com.google.protobuf.ByteString name_ = com.google.protobuf.ByteString.EMPTY;
11474            /**
11475             * <code>optional bytes name = 2;</code>
11476             *
11477             * <pre>
11478             * local name recorded in WithName
11479             * </pre>
11480             */
11481            public boolean hasName() {
11482              return ((bitField0_ & 0x00000002) == 0x00000002);
11483            }
11484            /**
11485             * <code>optional bytes name = 2;</code>
11486             *
11487             * <pre>
11488             * local name recorded in WithName
11489             * </pre>
11490             */
11491            public com.google.protobuf.ByteString getName() {
11492              return name_;
11493            }
11494            /**
11495             * <code>optional bytes name = 2;</code>
11496             *
11497             * <pre>
11498             * local name recorded in WithName
11499             * </pre>
11500             */
11501            public Builder setName(com.google.protobuf.ByteString value) {
11502              if (value == null) {
11503        throw new NullPointerException();
11504      }
11505      bitField0_ |= 0x00000002;
11506              name_ = value;
11507              onChanged();
11508              return this;
11509            }
11510            /**
11511             * <code>optional bytes name = 2;</code>
11512             *
11513             * <pre>
11514             * local name recorded in WithName
11515             * </pre>
11516             */
11517            public Builder clearName() {
11518              bitField0_ = (bitField0_ & ~0x00000002);
11519              name_ = getDefaultInstance().getName();
11520              onChanged();
11521              return this;
11522            }
11523    
11524            // optional uint32 dstSnapshotId = 3;
11525            private int dstSnapshotId_ ;
11526            /**
11527             * <code>optional uint32 dstSnapshotId = 3;</code>
11528             *
11529             * <pre>
11530             * recorded in DstReference
11531             * </pre>
11532             */
11533            public boolean hasDstSnapshotId() {
11534              return ((bitField0_ & 0x00000004) == 0x00000004);
11535            }
11536            /**
11537             * <code>optional uint32 dstSnapshotId = 3;</code>
11538             *
11539             * <pre>
11540             * recorded in DstReference
11541             * </pre>
11542             */
11543            public int getDstSnapshotId() {
11544              return dstSnapshotId_;
11545            }
11546            /**
11547             * <code>optional uint32 dstSnapshotId = 3;</code>
11548             *
11549             * <pre>
11550             * recorded in DstReference
11551             * </pre>
11552             */
11553            public Builder setDstSnapshotId(int value) {
11554              bitField0_ |= 0x00000004;
11555              dstSnapshotId_ = value;
11556              onChanged();
11557              return this;
11558            }
11559            /**
11560             * <code>optional uint32 dstSnapshotId = 3;</code>
11561             *
11562             * <pre>
11563             * recorded in DstReference
11564             * </pre>
11565             */
11566            public Builder clearDstSnapshotId() {
11567              bitField0_ = (bitField0_ & ~0x00000004);
11568              dstSnapshotId_ = 0;
11569              onChanged();
11570              return this;
11571            }
11572    
11573            // optional uint32 lastSnapshotId = 4;
11574            private int lastSnapshotId_ ;
11575            /**
11576             * <code>optional uint32 lastSnapshotId = 4;</code>
11577             *
11578             * <pre>
11579             * recorded in WithName
11580             * </pre>
11581             */
11582            public boolean hasLastSnapshotId() {
11583              return ((bitField0_ & 0x00000008) == 0x00000008);
11584            }
11585            /**
11586             * <code>optional uint32 lastSnapshotId = 4;</code>
11587             *
11588             * <pre>
11589             * recorded in WithName
11590             * </pre>
11591             */
11592            public int getLastSnapshotId() {
11593              return lastSnapshotId_;
11594            }
11595            /**
11596             * <code>optional uint32 lastSnapshotId = 4;</code>
11597             *
11598             * <pre>
11599             * recorded in WithName
11600             * </pre>
11601             */
11602            public Builder setLastSnapshotId(int value) {
11603              bitField0_ |= 0x00000008;
11604              lastSnapshotId_ = value;
11605              onChanged();
11606              return this;
11607            }
11608            /**
11609             * <code>optional uint32 lastSnapshotId = 4;</code>
11610             *
11611             * <pre>
11612             * recorded in WithName
11613             * </pre>
11614             */
11615            public Builder clearLastSnapshotId() {
11616              bitField0_ = (bitField0_ & ~0x00000008);
11617              lastSnapshotId_ = 0;
11618              onChanged();
11619              return this;
11620            }
11621    
11622            // @@protoc_insertion_point(builder_scope:hadoop.hdfs.fsimage.INodeReferenceSection.INodeReference)
11623          }
11624    
      static {
        // Eagerly construct and default-initialize the type's singleton
        // default instance.
        defaultInstance = new INodeReference(true);
        defaultInstance.initFields();
      }
11629    
11630          // @@protoc_insertion_point(class_scope:hadoop.hdfs.fsimage.INodeReferenceSection.INodeReference)
11631        }
11632    
    // The section message declares no fields of its own, so there is nothing
    // to reset to defaults.
    private void initFields() {
    }
    // Memoized isInitialized() result: -1 = not yet computed, 1 = initialized.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      // No declared fields, hence no required-field checks: always initialized.
      memoizedIsInitialized = 1;
      return true;
    }
11643    
    // With no declared fields, serialization emits only unknown fields.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();  // populate the memoized size before writing
      getUnknownFields().writeTo(output);
    }
11649    
    // Memoized wire size in bytes; -1 until first computed below.
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      // Only unknown fields contribute to this message's size.
      size = 0;
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
11660    
    private static final long serialVersionUID = 0L;
    // Java serialization is delegated to the superclass's serialized-form
    // replacement object.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
11667    
    // Static parsing entry points; every overload delegates to the shared PARSER.
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    // Delimited variants read a leading varint length before the message body.
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
11720    
    // Builder factory methods: fresh builder, builder seeded from a
    // prototype, and builder seeded from this instance.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    // Runtime hook: creates a builder attached to a parent for
    // change-notification propagation.
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
11734        /**
11735         * Protobuf type {@code hadoop.hdfs.fsimage.INodeReferenceSection}
11736         */
11737        public static final class Builder extends
11738            com.google.protobuf.GeneratedMessage.Builder<Builder>
11739           implements org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSectionOrBuilder {
      // Descriptor and reflection accessor table for this builder's type.
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeReferenceSection_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeReferenceSection_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.Builder.class);
      }
11751    
      // Construct using org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // No-op here: this message has no sub-message fields requiring eager
      // field-builder construction.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }
11769    
      // No declared fields, so clearing only resets superclass state.
      public Builder clear() {
        super.clear();
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeReferenceSection_descriptor;
      }
11783    
11784          public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection getDefaultInstanceForType() {
11785            return org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.getDefaultInstance();
11786          }
11787    
11788          public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection build() {
11789            org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection result = buildPartial();
11790            if (!result.isInitialized()) {
11791              throw newUninitializedMessageException(result);
11792            }
11793            return result;
11794          }
11795    
11796          public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection buildPartial() {
11797            org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection result = new org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection(this);
11798            onBuilt();
11799            return result;
11800          }
11801    
      // Typed dispatch for the generic Message overload.
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection) {
          return mergeFrom((org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      // With no declared fields, merging only folds in the other message's
      // unknown fields; merging the default instance is a no-op.
      public Builder mergeFrom(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection other) {
        if (other == org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.getDefaultInstance()) return this;
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      // No required fields, so every instance is initialized.
      public final boolean isInitialized() {
        return true;
      }

      // Parses from the stream and merges into this builder; on a parse
      // failure the partially-read message is still merged before rethrowing.
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
11838    
11839          // @@protoc_insertion_point(builder_scope:hadoop.hdfs.fsimage.INodeReferenceSection)
11840        }
11841    
    // Eagerly creates the shared default instance with default field values.
    static {
      defaultInstance = new INodeReferenceSection(true);
      defaultInstance.initFields();
    }
11846    
11847        // @@protoc_insertion_point(class_scope:hadoop.hdfs.fsimage.INodeReferenceSection)
11848      }
11849    
  /**
   * Read-only accessor interface implemented by {@code SnapshotSection} and
   * its Builder, generated from the {@code hadoop.hdfs.fsimage.SnapshotSection}
   * message in fsimage.proto.
   */
  public interface SnapshotSectionOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // optional uint32 snapshotCounter = 1;
    /**
     * <code>optional uint32 snapshotCounter = 1;</code>
     */
    boolean hasSnapshotCounter();
    /**
     * <code>optional uint32 snapshotCounter = 1;</code>
     */
    int getSnapshotCounter();

    // repeated uint64 snapshottableDir = 2 [packed = true];
    /**
     * <code>repeated uint64 snapshottableDir = 2 [packed = true];</code>
     */
    java.util.List<java.lang.Long> getSnapshottableDirList();
    /**
     * <code>repeated uint64 snapshottableDir = 2 [packed = true];</code>
     */
    int getSnapshottableDirCount();
    /**
     * <code>repeated uint64 snapshottableDir = 2 [packed = true];</code>
     */
    long getSnapshottableDir(int index);

    // optional uint32 numSnapshots = 3;
    /**
     * <code>optional uint32 numSnapshots = 3;</code>
     *
     * <pre>
     * total number of snapshots
     * </pre>
     */
    boolean hasNumSnapshots();
    /**
     * <code>optional uint32 numSnapshots = 3;</code>
     *
     * <pre>
     * total number of snapshots
     * </pre>
     */
    int getNumSnapshots();
  }
11895      /**
11896       * Protobuf type {@code hadoop.hdfs.fsimage.SnapshotSection}
11897       *
11898       * <pre>
11899       **
11900       * This section records the information about snapshot
11901       * NAME: SNAPSHOT
11902       * </pre>
11903       */
11904      public static final class SnapshotSection extends
11905          com.google.protobuf.GeneratedMessage
11906          implements SnapshotSectionOrBuilder {
    // Use SnapshotSection.newBuilder() to construct.
    private SnapshotSection(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // No-init constructor used only for the shared default instance.
    private SnapshotSection(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    // Shared immutable default instance.
    private static final SnapshotSection defaultInstance;
    public static SnapshotSection getDefaultInstance() {
      return defaultInstance;
    }

    public SnapshotSection getDefaultInstanceForType() {
      return defaultInstance;
    }

    // Fields read from the wire that did not match any known tag.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor: reads tag/value pairs until tag 0
    // (end of stream) or an unknown field signals end of the message.
    private SnapshotSection(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              // Unrecognized tag: preserve it; a false return ends parsing.
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 8: {
              // snapshotCounter = 1 (varint).
              bitField0_ |= 0x00000001;
              snapshotCounter_ = input.readUInt32();
              break;
            }
            case 16: {
              // snapshottableDir = 2, unpacked varint encoding; list is
              // created lazily on first element.
              if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
                snapshottableDir_ = new java.util.ArrayList<java.lang.Long>();
                mutable_bitField0_ |= 0x00000002;
              }
              snapshottableDir_.add(input.readUInt64());
              break;
            }
            case 18: {
              // snapshottableDir = 2, packed (length-delimited) encoding.
              int length = input.readRawVarint32();
              int limit = input.pushLimit(length);
              if (!((mutable_bitField0_ & 0x00000002) == 0x00000002) && input.getBytesUntilLimit() > 0) {
                snapshottableDir_ = new java.util.ArrayList<java.lang.Long>();
                mutable_bitField0_ |= 0x00000002;
              }
              while (input.getBytesUntilLimit() > 0) {
                snapshottableDir_.add(input.readUInt64());
              }
              input.popLimit(limit);
              break;
            }
            case 24: {
              // numSnapshots = 3 (varint).
              bitField0_ |= 0x00000002;
              numSnapshots_ = input.readUInt32();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Freeze the repeated field and attach whatever was read, even when
        // an exception is propagating (the partial message is recoverable).
        if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
          snapshottableDir_ = java.util.Collections.unmodifiableList(snapshottableDir_);
        }
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SnapshotSection_descriptor;
    }

    // Wires generated field accessors to the descriptor for reflection.
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SnapshotSection_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.Builder.class);
    }

    // Stateless parser delegating to the parsing constructor.
    public static com.google.protobuf.Parser<SnapshotSection> PARSER =
        new com.google.protobuf.AbstractParser<SnapshotSection>() {
      public SnapshotSection parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new SnapshotSection(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<SnapshotSection> getParserForType() {
      return PARSER;
    }
12024    
    /**
     * Read-only accessor interface implemented by
     * {@code SnapshotSection.Snapshot} and its Builder, generated from the
     * nested {@code hadoop.hdfs.fsimage.SnapshotSection.Snapshot} message.
     */
    public interface SnapshotOrBuilder
        extends com.google.protobuf.MessageOrBuilder {

      // optional uint32 snapshotId = 1;
      /**
       * <code>optional uint32 snapshotId = 1;</code>
       */
      boolean hasSnapshotId();
      /**
       * <code>optional uint32 snapshotId = 1;</code>
       */
      int getSnapshotId();

      // optional .hadoop.hdfs.fsimage.INodeSection.INode root = 2;
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.INode root = 2;</code>
       *
       * <pre>
       * Snapshot root
       * </pre>
       */
      boolean hasRoot();
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.INode root = 2;</code>
       *
       * <pre>
       * Snapshot root
       * </pre>
       */
      org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode getRoot();
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.INode root = 2;</code>
       *
       * <pre>
       * Snapshot root
       * </pre>
       */
      org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeOrBuilder getRootOrBuilder();
    }
12064        /**
12065         * Protobuf type {@code hadoop.hdfs.fsimage.SnapshotSection.Snapshot}
12066         */
12067        public static final class Snapshot extends
12068            com.google.protobuf.GeneratedMessage
12069            implements SnapshotOrBuilder {
      // Use Snapshot.newBuilder() to construct.
      private Snapshot(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
        super(builder);
        this.unknownFields = builder.getUnknownFields();
      }
      // No-init constructor used only for the shared default instance.
      private Snapshot(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

      // Shared immutable default instance.
      private static final Snapshot defaultInstance;
      public static Snapshot getDefaultInstance() {
        return defaultInstance;
      }

      public Snapshot getDefaultInstanceForType() {
        return defaultInstance;
      }

      // Fields read from the wire that did not match any known tag.
      private final com.google.protobuf.UnknownFieldSet unknownFields;
      @java.lang.Override
      public final com.google.protobuf.UnknownFieldSet
          getUnknownFields() {
        return this.unknownFields;
      }
      // Wire-format parsing constructor: reads tag/value pairs until tag 0
      // (end of stream) or an unknown field signals end of the message.
      private Snapshot(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        initFields();
        int mutable_bitField0_ = 0;
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
            com.google.protobuf.UnknownFieldSet.newBuilder();
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              default: {
                // Unrecognized tag: preserve it; a false return ends parsing.
                if (!parseUnknownField(input, unknownFields,
                                       extensionRegistry, tag)) {
                  done = true;
                }
                break;
              }
              case 8: {
                // snapshotId = 1 (varint).
                bitField0_ |= 0x00000001;
                snapshotId_ = input.readUInt32();
                break;
              }
              case 18: {
                // root = 2 (embedded INode message). If root was already set
                // by an earlier occurrence, merge per protobuf semantics.
                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode.Builder subBuilder = null;
                if (((bitField0_ & 0x00000002) == 0x00000002)) {
                  subBuilder = root_.toBuilder();
                }
                root_ = input.readMessage(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode.PARSER, extensionRegistry);
                if (subBuilder != null) {
                  subBuilder.mergeFrom(root_);
                  root_ = subBuilder.buildPartial();
                }
                bitField0_ |= 0x00000002;
                break;
              }
            }
          }
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(this);
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(
              e.getMessage()).setUnfinishedMessage(this);
        } finally {
          // Attach whatever was read, even when an exception is propagating.
          this.unknownFields = unknownFields.build();
          makeExtensionsImmutable();
        }
      }
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SnapshotSection_Snapshot_descriptor;
      }

      // Wires generated field accessors to the descriptor for reflection.
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SnapshotSection_Snapshot_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.Snapshot.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.Snapshot.Builder.class);
      }

      // Stateless parser delegating to the parsing constructor.
      public static com.google.protobuf.Parser<Snapshot> PARSER =
          new com.google.protobuf.AbstractParser<Snapshot>() {
        public Snapshot parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          return new Snapshot(input, extensionRegistry);
        }
      };

      @java.lang.Override
      public com.google.protobuf.Parser<Snapshot> getParserForType() {
        return PARSER;
      }
12171    
      // Presence bits: 0x1 = snapshotId, 0x2 = root.
      private int bitField0_;
      // optional uint32 snapshotId = 1;
      public static final int SNAPSHOTID_FIELD_NUMBER = 1;
      private int snapshotId_;
      /**
       * <code>optional uint32 snapshotId = 1;</code>
       */
      public boolean hasSnapshotId() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>optional uint32 snapshotId = 1;</code>
       */
      public int getSnapshotId() {
        return snapshotId_;
      }

      // optional .hadoop.hdfs.fsimage.INodeSection.INode root = 2;
      public static final int ROOT_FIELD_NUMBER = 2;
      private org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode root_;
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.INode root = 2;</code>
       *
       * <pre>
       * Snapshot root
       * </pre>
       */
      public boolean hasRoot() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.INode root = 2;</code>
       *
       * <pre>
       * Snapshot root
       * </pre>
       */
      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode getRoot() {
        return root_;
      }
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.INode root = 2;</code>
       *
       * <pre>
       * Snapshot root
       * </pre>
       */
      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeOrBuilder getRootOrBuilder() {
        return root_;
      }
12222    
      // Applies proto default values; called before parsing and for the
      // default instance.
      private void initFields() {
        snapshotId_ = 0;
        root_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode.getDefaultInstance();
      }
      // Memoized initialization check: -1 unknown, 0 false, 1 true.
      private byte memoizedIsInitialized = -1;
      public final boolean isInitialized() {
        byte isInitialized = memoizedIsInitialized;
        if (isInitialized != -1) return isInitialized == 1;

        // The optional root message must itself be initialized when present.
        if (hasRoot()) {
          if (!getRoot().isInitialized()) {
            memoizedIsInitialized = 0;
            return false;
          }
        }
        memoizedIsInitialized = 1;
        return true;
      }
12241    
      // Serializes only the fields whose presence bits are set, then any
      // unknown fields. getSerializedSize() is called first to populate the
      // memoized size used by nested-message length prefixes.
      public void writeTo(com.google.protobuf.CodedOutputStream output)
                          throws java.io.IOException {
        getSerializedSize();
        if (((bitField0_ & 0x00000001) == 0x00000001)) {
          output.writeUInt32(1, snapshotId_);
        }
        if (((bitField0_ & 0x00000002) == 0x00000002)) {
          output.writeMessage(2, root_);
        }
        getUnknownFields().writeTo(output);
      }

      // Memoized wire size; -1 means not yet computed.
      private int memoizedSerializedSize = -1;
      public int getSerializedSize() {
        int size = memoizedSerializedSize;
        if (size != -1) return size;

        size = 0;
        if (((bitField0_ & 0x00000001) == 0x00000001)) {
          size += com.google.protobuf.CodedOutputStream
            .computeUInt32Size(1, snapshotId_);
        }
        if (((bitField0_ & 0x00000002) == 0x00000002)) {
          size += com.google.protobuf.CodedOutputStream
            .computeMessageSize(2, root_);
        }
        size += getUnknownFields().getSerializedSize();
        memoizedSerializedSize = size;
        return size;
      }
12272    
      private static final long serialVersionUID = 0L;
      // Java serialization hook delegating to the GeneratedMessage proxy.
      @java.lang.Override
      protected java.lang.Object writeReplace()
          throws java.io.ObjectStreamException {
        return super.writeReplace();
      }

      // Static parse entry points; all delegate to PARSER.
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.Snapshot parseFrom(
          com.google.protobuf.ByteString data)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.Snapshot parseFrom(
          com.google.protobuf.ByteString data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.Snapshot parseFrom(byte[] data)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.Snapshot parseFrom(
          byte[] data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.Snapshot parseFrom(java.io.InputStream input)
          throws java.io.IOException {
        return PARSER.parseFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.Snapshot parseFrom(
          java.io.InputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseFrom(input, extensionRegistry);
      }
      // Delimited variants read a leading varint length prefix.
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.Snapshot parseDelimitedFrom(java.io.InputStream input)
          throws java.io.IOException {
        return PARSER.parseDelimitedFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.Snapshot parseDelimitedFrom(
          java.io.InputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseDelimitedFrom(input, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.Snapshot parseFrom(
          com.google.protobuf.CodedInputStream input)
          throws java.io.IOException {
        return PARSER.parseFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.Snapshot parseFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseFrom(input, extensionRegistry);
      }
12332    
      // Builder factories; newBuilder(prototype) seeds from an existing message.
      public static Builder newBuilder() { return Builder.create(); }
      public Builder newBuilderForType() { return newBuilder(); }
      public static Builder newBuilder(org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.Snapshot prototype) {
        return newBuilder().mergeFrom(prototype);
      }
      public Builder toBuilder() { return newBuilder(this); }

      // Parent-attached builder used for nested-builder change propagation.
      @java.lang.Override
      protected Builder newBuilderForType(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        Builder builder = new Builder(parent);
        return builder;
      }
12346          /**
12347           * Protobuf type {@code hadoop.hdfs.fsimage.SnapshotSection.Snapshot}
12348           */
12349          public static final class Builder extends
12350              com.google.protobuf.GeneratedMessage.Builder<Builder>
12351             implements org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.SnapshotOrBuilder {
          public static final com.google.protobuf.Descriptors.Descriptor
              getDescriptor() {
            return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SnapshotSection_Snapshot_descriptor;
          }

          // Wires generated field accessors to the descriptor for reflection.
          protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
              internalGetFieldAccessorTable() {
            return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SnapshotSection_Snapshot_fieldAccessorTable
                .ensureFieldAccessorsInitialized(
                    org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.Snapshot.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.Snapshot.Builder.class);
          }

          // Construct using org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.Snapshot.newBuilder()
          private Builder() {
            maybeForceBuilderInitialization();
          }

          // Variant used for nested builders: the parent is notified of changes.
          private Builder(
              com.google.protobuf.GeneratedMessage.BuilderParent parent) {
            super(parent);
            maybeForceBuilderInitialization();
          }
          // Eagerly creates the nested builder for the root message field when
          // running in non-lite mode.
          private void maybeForceBuilderInitialization() {
            if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
              getRootFieldBuilder();
            }
          }
          // Factory behind Snapshot.newBuilder().
          private static Builder create() {
            return new Builder();
          }
12382    
          // Resets all fields to defaults and clears the presence bits.
          public Builder clear() {
            super.clear();
            snapshotId_ = 0;
            bitField0_ = (bitField0_ & ~0x00000001);
            if (rootBuilder_ == null) {
              root_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode.getDefaultInstance();
            } else {
              rootBuilder_.clear();
            }
            bitField0_ = (bitField0_ & ~0x00000002);
            return this;
          }

          // Copy by round-tripping the current partial state into a new builder.
          public Builder clone() {
            return create().mergeFrom(buildPartial());
          }

          public com.google.protobuf.Descriptors.Descriptor
              getDescriptorForType() {
            return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SnapshotSection_Snapshot_descriptor;
          }
12404    
          public org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.Snapshot getDefaultInstanceForType() {
            return org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.Snapshot.getDefaultInstance();
          }

          // Builds the message, throwing if it is not fully initialized.
          public org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.Snapshot build() {
            org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.Snapshot result = buildPartial();
            if (!result.isInitialized()) {
              throw newUninitializedMessageException(result);
            }
            return result;
          }

          // Builds without the initialization check, transferring field values
          // and presence bits from the builder into the new message.
          public org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.Snapshot buildPartial() {
            org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.Snapshot result = new org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.Snapshot(this);
            int from_bitField0_ = bitField0_;
            int to_bitField0_ = 0;
            if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
              to_bitField0_ |= 0x00000001;
            }
            result.snapshotId_ = snapshotId_;
            if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
              to_bitField0_ |= 0x00000002;
            }
            // root comes from the nested builder when one was materialized.
            if (rootBuilder_ == null) {
              result.root_ = root_;
            } else {
              result.root_ = rootBuilder_.build();
            }
            result.bitField0_ = to_bitField0_;
            onBuilt();
            return result;
          }
12437    
        // Generic-message merge: dispatches to the typed overload when 'other'
        // is a Snapshot, otherwise falls back to the reflective superclass merge.
        public Builder mergeFrom(com.google.protobuf.Message other) {
          if (other instanceof org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.Snapshot) {
            return mergeFrom((org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.Snapshot)other);
          } else {
            super.mergeFrom(other);
            return this;
          }
        }
12446    
        // Field-wise merge from another Snapshot: a set snapshotId overwrites
        // ours, a set root is recursively merged, and unknown fields are
        // concatenated.  Merging the default instance is a no-op.
        public Builder mergeFrom(org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.Snapshot other) {
          if (other == org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.Snapshot.getDefaultInstance()) return this;
          if (other.hasSnapshotId()) {
            setSnapshotId(other.getSnapshotId());
          }
          if (other.hasRoot()) {
            mergeRoot(other.getRoot());
          }
          this.mergeUnknownFields(other.getUnknownFields());
          return this;
        }
12458    
        // Both Snapshot fields are optional, but if root is present its own
        // required fields must be set for the message to be initialized.
        public final boolean isInitialized() {
          if (hasRoot()) {
            if (!getRoot().isInitialized()) {
              
              return false;
            }
          }
          return true;
        }
12468    
        // Parses a Snapshot from the stream and merges it into this builder.
        // On InvalidProtocolBufferException the partially parsed message (if
        // any) is still merged in the finally block before rethrowing, so the
        // builder retains whatever fields were successfully read.
        public Builder mergeFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws java.io.IOException {
          org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.Snapshot parsedMessage = null;
          try {
            parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            parsedMessage = (org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.Snapshot) e.getUnfinishedMessage();
            throw e;
          } finally {
            if (parsedMessage != null) {
              mergeFrom(parsedMessage);
            }
          }
          return this;
        }
        // Presence bits for this builder: 0x1 = snapshotId, 0x2 = root.
        private int bitField0_;

        // optional uint32 snapshotId = 1;
        private int snapshotId_ ;
        /**
         * <code>optional uint32 snapshotId = 1;</code>
         */
        public boolean hasSnapshotId() {
          return ((bitField0_ & 0x00000001) == 0x00000001);
        }
        /**
         * <code>optional uint32 snapshotId = 1;</code>
         */
        public int getSnapshotId() {
          return snapshotId_;
        }
        /**
         * <code>optional uint32 snapshotId = 1;</code>
         */
        public Builder setSnapshotId(int value) {
          bitField0_ |= 0x00000001;
          snapshotId_ = value;
          onChanged();
          return this;
        }
        /**
         * <code>optional uint32 snapshotId = 1;</code>
         */
        public Builder clearSnapshotId() {
          bitField0_ = (bitField0_ & ~0x00000001);
          snapshotId_ = 0;
          onChanged();
          return this;
        }
12520    
        // optional .hadoop.hdfs.fsimage.INodeSection.INode root = 2;
        // The snapshot-root field lives in exactly one of two places: root_
        // until a nested builder is requested, then rootBuilder_ thereafter
        // (getRootFieldBuilder() performs the hand-off and nulls root_).
        private org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode root_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode.getDefaultInstance();
        private com.google.protobuf.SingleFieldBuilder<
            org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeOrBuilder> rootBuilder_;
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INode root = 2;</code>
         *
         * <pre>
         * Snapshot root
         * </pre>
         */
        public boolean hasRoot() {
          return ((bitField0_ & 0x00000002) == 0x00000002);
        }
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INode root = 2;</code>
         *
         * <pre>
         * Snapshot root
         * </pre>
         */
        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode getRoot() {
          if (rootBuilder_ == null) {
            return root_;
          } else {
            return rootBuilder_.getMessage();
          }
        }
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INode root = 2;</code>
         *
         * <pre>
         * Snapshot root
         * </pre>
         */
        public Builder setRoot(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode value) {
          if (rootBuilder_ == null) {
            if (value == null) {
              throw new NullPointerException();
            }
            root_ = value;
            onChanged();
          } else {
            rootBuilder_.setMessage(value);
          }
          bitField0_ |= 0x00000002;
          return this;
        }
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INode root = 2;</code>
         *
         * <pre>
         * Snapshot root
         * </pre>
         */
        public Builder setRoot(
            org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode.Builder builderForValue) {
          if (rootBuilder_ == null) {
            root_ = builderForValue.build();
            onChanged();
          } else {
            rootBuilder_.setMessage(builderForValue.build());
          }
          bitField0_ |= 0x00000002;
          return this;
        }
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INode root = 2;</code>
         *
         * <pre>
         * Snapshot root
         * </pre>
         */
        public Builder mergeRoot(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode value) {
          if (rootBuilder_ == null) {
            // Merge only when a non-default root was already set; otherwise
            // adopt 'value' wholesale (standard generated merge semantics).
            if (((bitField0_ & 0x00000002) == 0x00000002) &&
                root_ != org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode.getDefaultInstance()) {
              root_ =
                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode.newBuilder(root_).mergeFrom(value).buildPartial();
            } else {
              root_ = value;
            }
            onChanged();
          } else {
            rootBuilder_.mergeFrom(value);
          }
          bitField0_ |= 0x00000002;
          return this;
        }
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INode root = 2;</code>
         *
         * <pre>
         * Snapshot root
         * </pre>
         */
        public Builder clearRoot() {
          if (rootBuilder_ == null) {
            root_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode.getDefaultInstance();
            onChanged();
          } else {
            rootBuilder_.clear();
          }
          bitField0_ = (bitField0_ & ~0x00000002);
          return this;
        }
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INode root = 2;</code>
         *
         * <pre>
         * Snapshot root
         * </pre>
         */
        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode.Builder getRootBuilder() {
          // Marks root as present and forces the builder representation.
          bitField0_ |= 0x00000002;
          onChanged();
          return getRootFieldBuilder().getBuilder();
        }
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INode root = 2;</code>
         *
         * <pre>
         * Snapshot root
         * </pre>
         */
        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeOrBuilder getRootOrBuilder() {
          if (rootBuilder_ != null) {
            return rootBuilder_.getMessageOrBuilder();
          } else {
            return root_;
          }
        }
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INode root = 2;</code>
         *
         * <pre>
         * Snapshot root
         * </pre>
         */
        private com.google.protobuf.SingleFieldBuilder<
            org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeOrBuilder> 
            getRootFieldBuilder() {
          // Lazily creates the nested field builder, seeding it with the
          // current root_ and then clearing root_ (single source of truth).
          if (rootBuilder_ == null) {
            rootBuilder_ = new com.google.protobuf.SingleFieldBuilder<
                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeOrBuilder>(
                    root_,
                    getParentForChildren(),
                    isClean());
            root_ = null;
          }
          return rootBuilder_;
        }
12673    
12674            // @@protoc_insertion_point(builder_scope:hadoop.hdfs.fsimage.SnapshotSection.Snapshot)
12675          }
12676    
      // Eagerly constructs the shared default Snapshot instance and resets
      // its fields to their proto defaults.
      static {
        defaultInstance = new Snapshot(true);
        defaultInstance.initFields();
      }
12681    
12682          // @@protoc_insertion_point(class_scope:hadoop.hdfs.fsimage.SnapshotSection.Snapshot)
12683        }
12684    
    // Presence bits for the optional scalars: 0x1 = snapshotCounter,
    // 0x2 = numSnapshots.  (snapshottableDir is repeated; no presence bit.)
    private int bitField0_;
    // optional uint32 snapshotCounter = 1;
    public static final int SNAPSHOTCOUNTER_FIELD_NUMBER = 1;
    private int snapshotCounter_;
    /**
     * <code>optional uint32 snapshotCounter = 1;</code>
     */
    public boolean hasSnapshotCounter() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>optional uint32 snapshotCounter = 1;</code>
     */
    public int getSnapshotCounter() {
      return snapshotCounter_;
    }

    // repeated uint64 snapshottableDir = 2 [packed = true];
    public static final int SNAPSHOTTABLEDIR_FIELD_NUMBER = 2;
    private java.util.List<java.lang.Long> snapshottableDir_;
    /**
     * <code>repeated uint64 snapshottableDir = 2 [packed = true];</code>
     */
    public java.util.List<java.lang.Long>
        getSnapshottableDirList() {
      return snapshottableDir_;
    }
    /**
     * <code>repeated uint64 snapshottableDir = 2 [packed = true];</code>
     */
    public int getSnapshottableDirCount() {
      return snapshottableDir_.size();
    }
    /**
     * <code>repeated uint64 snapshottableDir = 2 [packed = true];</code>
     */
    public long getSnapshottableDir(int index) {
      return snapshottableDir_.get(index);
    }
    // Cached byte size of the packed snapshottableDir payload; written by
    // getSerializedSize() and read by writeTo() for the length prefix.
    private int snapshottableDirMemoizedSerializedSize = -1;

    // optional uint32 numSnapshots = 3;
    public static final int NUMSNAPSHOTS_FIELD_NUMBER = 3;
    private int numSnapshots_;
    /**
     * <code>optional uint32 numSnapshots = 3;</code>
     *
     * <pre>
     * total number of snapshots
     * </pre>
     */
    public boolean hasNumSnapshots() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>optional uint32 numSnapshots = 3;</code>
     *
     * <pre>
     * total number of snapshots
     * </pre>
     */
    public int getNumSnapshots() {
      return numSnapshots_;
    }
12749    
    // Resets every field to its proto default value.
    private void initFields() {
      snapshotCounter_ = 0;
      snapshottableDir_ = java.util.Collections.emptyList();
      numSnapshots_ = 0;
    }
    // SnapshotSection has no required fields, so initialization always
    // succeeds; the result is memoized (-1 = unknown, 1 = initialized).
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      memoizedIsInitialized = 1;
      return true;
    }
12763    
    // Serializes set fields in field-number order.  getSerializedSize() is
    // called first for its side effect of computing
    // snapshottableDirMemoizedSerializedSize, which is needed below.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeUInt32(1, snapshotCounter_);
      }
      // Packed repeated field: raw tag 18 (field 2, wire type 2), then the
      // payload length, then the varint values with no per-element tags.
      if (getSnapshottableDirList().size() > 0) {
        output.writeRawVarint32(18);
        output.writeRawVarint32(snapshottableDirMemoizedSerializedSize);
      }
      for (int i = 0; i < snapshottableDir_.size(); i++) {
        output.writeUInt64NoTag(snapshottableDir_.get(i));
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeUInt32(3, numSnapshots_);
      }
      getUnknownFields().writeTo(output);
    }
12782    
    // Computes (and memoizes) the serialized byte size.  Also caches the
    // packed snapshottableDir payload size for writeTo().
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt32Size(1, snapshotCounter_);
      }
      {
        int dataSize = 0;
        for (int i = 0; i < snapshottableDir_.size(); i++) {
          dataSize += com.google.protobuf.CodedOutputStream
            .computeUInt64SizeNoTag(snapshottableDir_.get(i));
        }
        size += dataSize;
        // Non-empty packed field additionally costs 1 tag byte plus the
        // varint length prefix.
        if (!getSnapshottableDirList().isEmpty()) {
          size += 1;
          size += com.google.protobuf.CodedOutputStream
              .computeInt32SizeNoTag(dataSize);
        }
        snapshottableDirMemoizedSerializedSize = dataSize;
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt32Size(3, numSnapshots_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
12815    
    // Java serialization hook; delegates to the superclass replacement
    // object so the message serializes via its protobuf form.
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
12822    
    // Static parse entry points; all delegate to PARSER.  The byte/ByteString
    // overloads throw InvalidProtocolBufferException on malformed input, the
    // stream overloads additionally propagate IOException, and the
    // "Delimited" variants expect a leading varint length prefix.
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
12875    
    // Builder factories: fresh builder, builder pre-populated from a
    // prototype, and toBuilder() for round-tripping this instance.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
12889        /**
12890         * Protobuf type {@code hadoop.hdfs.fsimage.SnapshotSection}
12891         *
12892         * <pre>
12893         **
12894         * This section records the information about snapshot
12895         * NAME: SNAPSHOT
12896         * </pre>
12897         */
12898        public static final class Builder extends
12899            com.google.protobuf.GeneratedMessage.Builder<Builder>
12900           implements org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSectionOrBuilder {
12901          public static final com.google.protobuf.Descriptors.Descriptor
12902              getDescriptor() {
12903            return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SnapshotSection_descriptor;
12904          }
12905    
12906          protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
12907              internalGetFieldAccessorTable() {
12908            return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SnapshotSection_fieldAccessorTable
12909                .ensureFieldAccessorsInitialized(
12910                    org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.Builder.class);
12911          }
12912    
12913          // Construct using org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.newBuilder()
12914          private Builder() {
12915            maybeForceBuilderInitialization();
12916          }
12917    
12918          private Builder(
12919              com.google.protobuf.GeneratedMessage.BuilderParent parent) {
12920            super(parent);
12921            maybeForceBuilderInitialization();
12922          }
12923          private void maybeForceBuilderInitialization() {
12924            if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
12925            }
12926          }
12927          private static Builder create() {
12928            return new Builder();
12929          }
12930    
12931          public Builder clear() {
12932            super.clear();
12933            snapshotCounter_ = 0;
12934            bitField0_ = (bitField0_ & ~0x00000001);
12935            snapshottableDir_ = java.util.Collections.emptyList();
12936            bitField0_ = (bitField0_ & ~0x00000002);
12937            numSnapshots_ = 0;
12938            bitField0_ = (bitField0_ & ~0x00000004);
12939            return this;
12940          }
12941    
12942          public Builder clone() {
12943            return create().mergeFrom(buildPartial());
12944          }
12945    
12946          public com.google.protobuf.Descriptors.Descriptor
12947              getDescriptorForType() {
12948            return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SnapshotSection_descriptor;
12949          }
12950    
12951          public org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection getDefaultInstanceForType() {
12952            return org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.getDefaultInstance();
12953          }
12954    
12955          public org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection build() {
12956            org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection result = buildPartial();
12957            if (!result.isInitialized()) {
12958              throw newUninitializedMessageException(result);
12959            }
12960            return result;
12961          }
12962    
12963          public org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection buildPartial() {
12964            org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection result = new org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection(this);
12965            int from_bitField0_ = bitField0_;
12966            int to_bitField0_ = 0;
12967            if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
12968              to_bitField0_ |= 0x00000001;
12969            }
12970            result.snapshotCounter_ = snapshotCounter_;
12971            if (((bitField0_ & 0x00000002) == 0x00000002)) {
12972              snapshottableDir_ = java.util.Collections.unmodifiableList(snapshottableDir_);
12973              bitField0_ = (bitField0_ & ~0x00000002);
12974            }
12975            result.snapshottableDir_ = snapshottableDir_;
12976            if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
12977              to_bitField0_ |= 0x00000002;
12978            }
12979            result.numSnapshots_ = numSnapshots_;
12980            result.bitField0_ = to_bitField0_;
12981            onBuilt();
12982            return result;
12983          }
12984    
12985          public Builder mergeFrom(com.google.protobuf.Message other) {
12986            if (other instanceof org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection) {
12987              return mergeFrom((org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection)other);
12988            } else {
12989              super.mergeFrom(other);
12990              return this;
12991            }
12992          }
12993    
12994          public Builder mergeFrom(org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection other) {
12995            if (other == org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.getDefaultInstance()) return this;
12996            if (other.hasSnapshotCounter()) {
12997              setSnapshotCounter(other.getSnapshotCounter());
12998            }
12999            if (!other.snapshottableDir_.isEmpty()) {
13000              if (snapshottableDir_.isEmpty()) {
13001                snapshottableDir_ = other.snapshottableDir_;
13002                bitField0_ = (bitField0_ & ~0x00000002);
13003              } else {
13004                ensureSnapshottableDirIsMutable();
13005                snapshottableDir_.addAll(other.snapshottableDir_);
13006              }
13007              onChanged();
13008            }
13009            if (other.hasNumSnapshots()) {
13010              setNumSnapshots(other.getNumSnapshots());
13011            }
13012            this.mergeUnknownFields(other.getUnknownFields());
13013            return this;
13014          }
13015    
13016          public final boolean isInitialized() {
13017            return true;
13018          }
13019    
13020          public Builder mergeFrom(
13021              com.google.protobuf.CodedInputStream input,
13022              com.google.protobuf.ExtensionRegistryLite extensionRegistry)
13023              throws java.io.IOException {
13024            org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection parsedMessage = null;
13025            try {
13026              parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
13027            } catch (com.google.protobuf.InvalidProtocolBufferException e) {
13028              parsedMessage = (org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection) e.getUnfinishedMessage();
13029              throw e;
13030            } finally {
13031              if (parsedMessage != null) {
13032                mergeFrom(parsedMessage);
13033              }
13034            }
13035            return this;
13036          }
13037          private int bitField0_;
13038    
13039          // optional uint32 snapshotCounter = 1;
13040          private int snapshotCounter_ ;
13041          /**
13042           * <code>optional uint32 snapshotCounter = 1;</code>
13043           */
13044          public boolean hasSnapshotCounter() {
13045            return ((bitField0_ & 0x00000001) == 0x00000001);
13046          }
13047          /**
13048           * <code>optional uint32 snapshotCounter = 1;</code>
13049           */
13050          public int getSnapshotCounter() {
13051            return snapshotCounter_;
13052          }
13053          /**
13054           * <code>optional uint32 snapshotCounter = 1;</code>
13055           */
13056          public Builder setSnapshotCounter(int value) {
13057            bitField0_ |= 0x00000001;
13058            snapshotCounter_ = value;
13059            onChanged();
13060            return this;
13061          }
13062          /**
13063           * <code>optional uint32 snapshotCounter = 1;</code>
13064           */
13065          public Builder clearSnapshotCounter() {
13066            bitField0_ = (bitField0_ & ~0x00000001);
13067            snapshotCounter_ = 0;
13068            onChanged();
13069            return this;
13070          }
13071    
13072          // repeated uint64 snapshottableDir = 2 [packed = true];
13073          private java.util.List<java.lang.Long> snapshottableDir_ = java.util.Collections.emptyList();
13074          private void ensureSnapshottableDirIsMutable() {
13075            if (!((bitField0_ & 0x00000002) == 0x00000002)) {
13076              snapshottableDir_ = new java.util.ArrayList<java.lang.Long>(snapshottableDir_);
13077              bitField0_ |= 0x00000002;
13078             }
13079          }
13080          /**
13081           * <code>repeated uint64 snapshottableDir = 2 [packed = true];</code>
13082           */
13083          public java.util.List<java.lang.Long>
13084              getSnapshottableDirList() {
13085            return java.util.Collections.unmodifiableList(snapshottableDir_);
13086          }
13087          /**
13088           * <code>repeated uint64 snapshottableDir = 2 [packed = true];</code>
13089           */
13090          public int getSnapshottableDirCount() {
13091            return snapshottableDir_.size();
13092          }
13093          /**
13094           * <code>repeated uint64 snapshottableDir = 2 [packed = true];</code>
13095           */
13096          public long getSnapshottableDir(int index) {
13097            return snapshottableDir_.get(index);
13098          }
13099          /**
13100           * <code>repeated uint64 snapshottableDir = 2 [packed = true];</code>
13101           */
13102          public Builder setSnapshottableDir(
13103              int index, long value) {
13104            ensureSnapshottableDirIsMutable();
13105            snapshottableDir_.set(index, value);
13106            onChanged();
13107            return this;
13108          }
13109          /**
13110           * <code>repeated uint64 snapshottableDir = 2 [packed = true];</code>
13111           */
13112          public Builder addSnapshottableDir(long value) {
13113            ensureSnapshottableDirIsMutable();
13114            snapshottableDir_.add(value);
13115            onChanged();
13116            return this;
13117          }
13118          /**
13119           * <code>repeated uint64 snapshottableDir = 2 [packed = true];</code>
13120           */
13121          public Builder addAllSnapshottableDir(
13122              java.lang.Iterable<? extends java.lang.Long> values) {
13123            ensureSnapshottableDirIsMutable();
13124            super.addAll(values, snapshottableDir_);
13125            onChanged();
13126            return this;
13127          }
13128          /**
13129           * <code>repeated uint64 snapshottableDir = 2 [packed = true];</code>
13130           */
13131          public Builder clearSnapshottableDir() {
13132            snapshottableDir_ = java.util.Collections.emptyList();
13133            bitField0_ = (bitField0_ & ~0x00000002);
13134            onChanged();
13135            return this;
13136          }
13137    
      // optional uint32 numSnapshots = 3;
      // Presence is tracked by bit 0x00000004 of bitField0_.
      private int numSnapshots_ ;
      /**
       * <code>optional uint32 numSnapshots = 3;</code>
       *
       * <pre>
       * total number of snapshots
       * </pre>
       */
      public boolean hasNumSnapshots() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
13150          /**
13151           * <code>optional uint32 numSnapshots = 3;</code>
13152           *
13153           * <pre>
13154           * total number of snapshots
13155           * </pre>
13156           */
13157          public int getNumSnapshots() {
13158            return numSnapshots_;
13159          }
13160          /**
13161           * <code>optional uint32 numSnapshots = 3;</code>
13162           *
13163           * <pre>
13164           * total number of snapshots
13165           * </pre>
13166           */
13167          public Builder setNumSnapshots(int value) {
13168            bitField0_ |= 0x00000004;
13169            numSnapshots_ = value;
13170            onChanged();
13171            return this;
13172          }
13173          /**
13174           * <code>optional uint32 numSnapshots = 3;</code>
13175           *
13176           * <pre>
13177           * total number of snapshots
13178           * </pre>
13179           */
13180          public Builder clearNumSnapshots() {
13181            bitField0_ = (bitField0_ & ~0x00000004);
13182            numSnapshots_ = 0;
13183            onChanged();
13184            return this;
13185          }
13186    
13187          // @@protoc_insertion_point(builder_scope:hadoop.hdfs.fsimage.SnapshotSection)
13188        }
13189    
    // Eagerly create and initialize the shared default (empty) instance
    // returned by getDefaultInstance().
    static {
      defaultInstance = new SnapshotSection(true);
      defaultInstance.initFields();
    }
13194    
13195        // @@protoc_insertion_point(class_scope:hadoop.hdfs.fsimage.SnapshotSection)
13196      }
13197    
  /**
   * Accessor contract for {@code hadoop.hdfs.fsimage.SnapshotDiffSection}.
   * The message declares no fields of its own, so the interface is empty.
   */
  public interface SnapshotDiffSectionOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
  }
13201      /**
13202       * Protobuf type {@code hadoop.hdfs.fsimage.SnapshotDiffSection}
13203       *
13204       * <pre>
13205       **
13206       * This section records information about snapshot diffs
13207       * NAME: SNAPSHOT_DIFF
13208       * </pre>
13209       */
13210      public static final class SnapshotDiffSection extends
13211          com.google.protobuf.GeneratedMessage
13212          implements SnapshotDiffSectionOrBuilder {
    // Use SnapshotDiffSection.newBuilder() to construct.
    private SnapshotDiffSection(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      // carry over any unknown fields the builder accumulated
      this.unknownFields = builder.getUnknownFields();
    }
    // Lightweight constructor that skips field initialization; unknown fields start empty.
    private SnapshotDiffSection(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
13219    
    // Shared singleton default instance (assigned in the class's static initializer).
    private static final SnapshotDiffSection defaultInstance;
    public static SnapshotDiffSection getDefaultInstance() {
      return defaultInstance;
    }
13224    
    // Instance-level accessor for the same shared default instance.
    public SnapshotDiffSection getDefaultInstanceForType() {
      return defaultInstance;
    }
13228    
    // Fields read from the wire that this generated class does not recognize.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    /**
     * Parses a serialized {@code SnapshotDiffSection} from {@code input}.
     * This message declares no fields here, so every non-zero tag is routed
     * to the unknown-field set; tag 0 marks end of input.
     */
    private SnapshotDiffSection(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              // parseUnknownField returns false when the tag signals end of input/group
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // always record whatever unknown fields were read, even on failure
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    // Static descriptor accessor for this message type.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_descriptor;
    }
13273    
    // Reflection support: maps descriptor fields to the generated accessors.
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.Builder.class);
    }
13280    
    // Stream parser; delegates to the private parsing constructor.
    public static com.google.protobuf.Parser<SnapshotDiffSection> PARSER =
        new com.google.protobuf.AbstractParser<SnapshotDiffSection>() {
      public SnapshotDiffSection parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new SnapshotDiffSection(input, extensionRegistry);
      }
    };
13290    
    @java.lang.Override
    public com.google.protobuf.Parser<SnapshotDiffSection> getParserForType() {
      return PARSER;
    }
13295    
    /**
     * Accessor contract for {@code hadoop.hdfs.fsimage.SnapshotDiffSection.CreatedListEntry}
     * messages and builders: one has/get pair for its single {@code name} field.
     */
    public interface CreatedListEntryOrBuilder
        extends com.google.protobuf.MessageOrBuilder {

      // optional bytes name = 1;
      /**
       * <code>optional bytes name = 1;</code>
       */
      boolean hasName();
      /**
       * <code>optional bytes name = 1;</code>
       */
      com.google.protobuf.ByteString getName();
    }
13309        /**
13310         * Protobuf type {@code hadoop.hdfs.fsimage.SnapshotDiffSection.CreatedListEntry}
13311         */
13312        public static final class CreatedListEntry extends
13313            com.google.protobuf.GeneratedMessage
13314            implements CreatedListEntryOrBuilder {
13315          // Use CreatedListEntry.newBuilder() to construct.
13316          private CreatedListEntry(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
13317            super(builder);
13318            this.unknownFields = builder.getUnknownFields();
13319          }
13320          private CreatedListEntry(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
13321    
13322          private static final CreatedListEntry defaultInstance;
13323          public static CreatedListEntry getDefaultInstance() {
13324            return defaultInstance;
13325          }
13326    
13327          public CreatedListEntry getDefaultInstanceForType() {
13328            return defaultInstance;
13329          }
13330    
13331          private final com.google.protobuf.UnknownFieldSet unknownFields;
13332          @java.lang.Override
13333          public final com.google.protobuf.UnknownFieldSet
13334              getUnknownFields() {
13335            return this.unknownFields;
13336          }
13337          private CreatedListEntry(
13338              com.google.protobuf.CodedInputStream input,
13339              com.google.protobuf.ExtensionRegistryLite extensionRegistry)
13340              throws com.google.protobuf.InvalidProtocolBufferException {
13341            initFields();
13342            int mutable_bitField0_ = 0;
13343            com.google.protobuf.UnknownFieldSet.Builder unknownFields =
13344                com.google.protobuf.UnknownFieldSet.newBuilder();
13345            try {
13346              boolean done = false;
13347              while (!done) {
13348                int tag = input.readTag();
13349                switch (tag) {
13350                  case 0:
13351                    done = true;
13352                    break;
13353                  default: {
13354                    if (!parseUnknownField(input, unknownFields,
13355                                           extensionRegistry, tag)) {
13356                      done = true;
13357                    }
13358                    break;
13359                  }
13360                  case 10: {
13361                    bitField0_ |= 0x00000001;
13362                    name_ = input.readBytes();
13363                    break;
13364                  }
13365                }
13366              }
13367            } catch (com.google.protobuf.InvalidProtocolBufferException e) {
13368              throw e.setUnfinishedMessage(this);
13369            } catch (java.io.IOException e) {
13370              throw new com.google.protobuf.InvalidProtocolBufferException(
13371                  e.getMessage()).setUnfinishedMessage(this);
13372            } finally {
13373              this.unknownFields = unknownFields.build();
13374              makeExtensionsImmutable();
13375            }
13376          }
13377          public static final com.google.protobuf.Descriptors.Descriptor
13378              getDescriptor() {
13379            return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_CreatedListEntry_descriptor;
13380          }
13381    
13382          protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
13383              internalGetFieldAccessorTable() {
13384            return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_CreatedListEntry_fieldAccessorTable
13385                .ensureFieldAccessorsInitialized(
13386                    org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.CreatedListEntry.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.CreatedListEntry.Builder.class);
13387          }
13388    
13389          public static com.google.protobuf.Parser<CreatedListEntry> PARSER =
13390              new com.google.protobuf.AbstractParser<CreatedListEntry>() {
13391            public CreatedListEntry parsePartialFrom(
13392                com.google.protobuf.CodedInputStream input,
13393                com.google.protobuf.ExtensionRegistryLite extensionRegistry)
13394                throws com.google.protobuf.InvalidProtocolBufferException {
13395              return new CreatedListEntry(input, extensionRegistry);
13396            }
13397          };
13398    
13399          @java.lang.Override
13400          public com.google.protobuf.Parser<CreatedListEntry> getParserForType() {
13401            return PARSER;
13402          }
13403    
13404          private int bitField0_;
13405          // optional bytes name = 1;
13406          public static final int NAME_FIELD_NUMBER = 1;
13407          private com.google.protobuf.ByteString name_;
13408          /**
13409           * <code>optional bytes name = 1;</code>
13410           */
13411          public boolean hasName() {
13412            return ((bitField0_ & 0x00000001) == 0x00000001);
13413          }
13414          /**
13415           * <code>optional bytes name = 1;</code>
13416           */
13417          public com.google.protobuf.ByteString getName() {
13418            return name_;
13419          }
13420    
13421          private void initFields() {
13422            name_ = com.google.protobuf.ByteString.EMPTY;
13423          }
13424          private byte memoizedIsInitialized = -1;
13425          public final boolean isInitialized() {
13426            byte isInitialized = memoizedIsInitialized;
13427            if (isInitialized != -1) return isInitialized == 1;
13428    
13429            memoizedIsInitialized = 1;
13430            return true;
13431          }
13432    
13433          public void writeTo(com.google.protobuf.CodedOutputStream output)
13434                              throws java.io.IOException {
13435            getSerializedSize();
13436            if (((bitField0_ & 0x00000001) == 0x00000001)) {
13437              output.writeBytes(1, name_);
13438            }
13439            getUnknownFields().writeTo(output);
13440          }
13441    
13442          private int memoizedSerializedSize = -1;
13443          public int getSerializedSize() {
13444            int size = memoizedSerializedSize;
13445            if (size != -1) return size;
13446    
13447            size = 0;
13448            if (((bitField0_ & 0x00000001) == 0x00000001)) {
13449              size += com.google.protobuf.CodedOutputStream
13450                .computeBytesSize(1, name_);
13451            }
13452            size += getUnknownFields().getSerializedSize();
13453            memoizedSerializedSize = size;
13454            return size;
13455          }
13456    
13457          private static final long serialVersionUID = 0L;
13458          @java.lang.Override
13459          protected java.lang.Object writeReplace()
13460              throws java.io.ObjectStreamException {
13461            return super.writeReplace();
13462          }
13463    
13464          public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.CreatedListEntry parseFrom(
13465              com.google.protobuf.ByteString data)
13466              throws com.google.protobuf.InvalidProtocolBufferException {
13467            return PARSER.parseFrom(data);
13468          }
13469          public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.CreatedListEntry parseFrom(
13470              com.google.protobuf.ByteString data,
13471              com.google.protobuf.ExtensionRegistryLite extensionRegistry)
13472              throws com.google.protobuf.InvalidProtocolBufferException {
13473            return PARSER.parseFrom(data, extensionRegistry);
13474          }
13475          public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.CreatedListEntry parseFrom(byte[] data)
13476              throws com.google.protobuf.InvalidProtocolBufferException {
13477            return PARSER.parseFrom(data);
13478          }
13479          public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.CreatedListEntry parseFrom(
13480              byte[] data,
13481              com.google.protobuf.ExtensionRegistryLite extensionRegistry)
13482              throws com.google.protobuf.InvalidProtocolBufferException {
13483            return PARSER.parseFrom(data, extensionRegistry);
13484          }
13485          public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.CreatedListEntry parseFrom(java.io.InputStream input)
13486              throws java.io.IOException {
13487            return PARSER.parseFrom(input);
13488          }
13489          public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.CreatedListEntry parseFrom(
13490              java.io.InputStream input,
13491              com.google.protobuf.ExtensionRegistryLite extensionRegistry)
13492              throws java.io.IOException {
13493            return PARSER.parseFrom(input, extensionRegistry);
13494          }
13495          public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.CreatedListEntry parseDelimitedFrom(java.io.InputStream input)
13496              throws java.io.IOException {
13497            return PARSER.parseDelimitedFrom(input);
13498          }
13499          public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.CreatedListEntry parseDelimitedFrom(
13500              java.io.InputStream input,
13501              com.google.protobuf.ExtensionRegistryLite extensionRegistry)
13502              throws java.io.IOException {
13503            return PARSER.parseDelimitedFrom(input, extensionRegistry);
13504          }
13505          public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.CreatedListEntry parseFrom(
13506              com.google.protobuf.CodedInputStream input)
13507              throws java.io.IOException {
13508            return PARSER.parseFrom(input);
13509          }
13510          public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.CreatedListEntry parseFrom(
13511              com.google.protobuf.CodedInputStream input,
13512              com.google.protobuf.ExtensionRegistryLite extensionRegistry)
13513              throws java.io.IOException {
13514            return PARSER.parseFrom(input, extensionRegistry);
13515          }
13516    
13517          public static Builder newBuilder() { return Builder.create(); }
13518          public Builder newBuilderForType() { return newBuilder(); }
13519          public static Builder newBuilder(org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.CreatedListEntry prototype) {
13520            return newBuilder().mergeFrom(prototype);
13521          }
13522          public Builder toBuilder() { return newBuilder(this); }
13523    
13524          @java.lang.Override
13525          protected Builder newBuilderForType(
13526              com.google.protobuf.GeneratedMessage.BuilderParent parent) {
13527            Builder builder = new Builder(parent);
13528            return builder;
13529          }
13530          /**
13531           * Protobuf type {@code hadoop.hdfs.fsimage.SnapshotDiffSection.CreatedListEntry}
13532           */
13533          public static final class Builder extends
13534              com.google.protobuf.GeneratedMessage.Builder<Builder>
13535             implements org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.CreatedListEntryOrBuilder {
13536            public static final com.google.protobuf.Descriptors.Descriptor
13537                getDescriptor() {
13538              return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_CreatedListEntry_descriptor;
13539            }
13540    
13541            protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
13542                internalGetFieldAccessorTable() {
13543              return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_CreatedListEntry_fieldAccessorTable
13544                  .ensureFieldAccessorsInitialized(
13545                      org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.CreatedListEntry.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.CreatedListEntry.Builder.class);
13546            }
13547    
13548            // Construct using org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.CreatedListEntry.newBuilder()
13549            private Builder() {
13550              maybeForceBuilderInitialization();
13551            }
13552    
13553            private Builder(
13554                com.google.protobuf.GeneratedMessage.BuilderParent parent) {
13555              super(parent);
13556              maybeForceBuilderInitialization();
13557            }
13558            private void maybeForceBuilderInitialization() {
13559              if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
13560              }
13561            }
13562            private static Builder create() {
13563              return new Builder();
13564            }
13565    
13566            public Builder clear() {
13567              super.clear();
13568              name_ = com.google.protobuf.ByteString.EMPTY;
13569              bitField0_ = (bitField0_ & ~0x00000001);
13570              return this;
13571            }
13572    
13573            public Builder clone() {
13574              return create().mergeFrom(buildPartial());
13575            }
13576    
13577            public com.google.protobuf.Descriptors.Descriptor
13578                getDescriptorForType() {
13579              return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_CreatedListEntry_descriptor;
13580            }
13581    
13582            public org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.CreatedListEntry getDefaultInstanceForType() {
13583              return org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.CreatedListEntry.getDefaultInstance();
13584            }
13585    
13586            public org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.CreatedListEntry build() {
13587              org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.CreatedListEntry result = buildPartial();
13588              if (!result.isInitialized()) {
13589                throw newUninitializedMessageException(result);
13590              }
13591              return result;
13592            }
13593    
13594            public org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.CreatedListEntry buildPartial() {
13595              org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.CreatedListEntry result = new org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.CreatedListEntry(this);
13596              int from_bitField0_ = bitField0_;
13597              int to_bitField0_ = 0;
13598              if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
13599                to_bitField0_ |= 0x00000001;
13600              }
13601              result.name_ = name_;
13602              result.bitField0_ = to_bitField0_;
13603              onBuilt();
13604              return result;
13605            }
13606    
13607            public Builder mergeFrom(com.google.protobuf.Message other) {
13608              if (other instanceof org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.CreatedListEntry) {
13609                return mergeFrom((org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.CreatedListEntry)other);
13610              } else {
13611                super.mergeFrom(other);
13612                return this;
13613              }
13614            }
13615    
13616            public Builder mergeFrom(org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.CreatedListEntry other) {
13617              if (other == org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.CreatedListEntry.getDefaultInstance()) return this;
13618              if (other.hasName()) {
13619                setName(other.getName());
13620              }
13621              this.mergeUnknownFields(other.getUnknownFields());
13622              return this;
13623            }
13624    
13625            public final boolean isInitialized() {
13626              return true;
13627            }
13628    
13629            public Builder mergeFrom(
13630                com.google.protobuf.CodedInputStream input,
13631                com.google.protobuf.ExtensionRegistryLite extensionRegistry)
13632                throws java.io.IOException {
13633              org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.CreatedListEntry parsedMessage = null;
13634              try {
13635                parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
13636              } catch (com.google.protobuf.InvalidProtocolBufferException e) {
13637                parsedMessage = (org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.CreatedListEntry) e.getUnfinishedMessage();
13638                throw e;
13639              } finally {
13640                if (parsedMessage != null) {
13641                  mergeFrom(parsedMessage);
13642                }
13643              }
13644              return this;
13645            }
13646            private int bitField0_;
13647    
13648            // optional bytes name = 1;
13649            private com.google.protobuf.ByteString name_ = com.google.protobuf.ByteString.EMPTY;
13650            /**
13651             * <code>optional bytes name = 1;</code>
13652             */
13653            public boolean hasName() {
13654              return ((bitField0_ & 0x00000001) == 0x00000001);
13655            }
13656            /**
13657             * <code>optional bytes name = 1;</code>
13658             */
13659            public com.google.protobuf.ByteString getName() {
13660              return name_;
13661            }
13662            /**
13663             * <code>optional bytes name = 1;</code>
13664             */
13665            public Builder setName(com.google.protobuf.ByteString value) {
13666              if (value == null) {
13667        throw new NullPointerException();
13668      }
13669      bitField0_ |= 0x00000001;
13670              name_ = value;
13671              onChanged();
13672              return this;
13673            }
13674            /**
13675             * <code>optional bytes name = 1;</code>
13676             */
13677            public Builder clearName() {
13678              bitField0_ = (bitField0_ & ~0x00000001);
13679              name_ = getDefaultInstance().getName();
13680              onChanged();
13681              return this;
13682            }
13683    
13684            // @@protoc_insertion_point(builder_scope:hadoop.hdfs.fsimage.SnapshotDiffSection.CreatedListEntry)
13685          }
13686    
13687          static {
13688            defaultInstance = new CreatedListEntry(true);
13689            defaultInstance.initFields();
13690          }
13691    
13692          // @@protoc_insertion_point(class_scope:hadoop.hdfs.fsimage.SnapshotDiffSection.CreatedListEntry)
13693        }
13694    
    /**
     * Accessor contract for {@code hadoop.hdfs.fsimage.SnapshotDiffSection.DirectoryDiff}
     * messages and builders: one has/get pair per optional field, plus
     * list/count/indexed accessors for the packed repeated fields.
     */
    public interface DirectoryDiffOrBuilder
        extends com.google.protobuf.MessageOrBuilder {

      // optional uint32 snapshotId = 1;
      /**
       * <code>optional uint32 snapshotId = 1;</code>
       */
      boolean hasSnapshotId();
      /**
       * <code>optional uint32 snapshotId = 1;</code>
       */
      int getSnapshotId();

      // optional uint32 childrenSize = 2;
      /**
       * <code>optional uint32 childrenSize = 2;</code>
       */
      boolean hasChildrenSize();
      /**
       * <code>optional uint32 childrenSize = 2;</code>
       */
      int getChildrenSize();

      // optional bool isSnapshotRoot = 3;
      /**
       * <code>optional bool isSnapshotRoot = 3;</code>
       */
      boolean hasIsSnapshotRoot();
      /**
       * <code>optional bool isSnapshotRoot = 3;</code>
       */
      boolean getIsSnapshotRoot();

      // optional bytes name = 4;
      /**
       * <code>optional bytes name = 4;</code>
       */
      boolean hasName();
      /**
       * <code>optional bytes name = 4;</code>
       */
      com.google.protobuf.ByteString getName();

      // optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory snapshotCopy = 5;
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory snapshotCopy = 5;</code>
       */
      boolean hasSnapshotCopy();
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory snapshotCopy = 5;</code>
       */
      org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory getSnapshotCopy();
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory snapshotCopy = 5;</code>
       */
      org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectoryOrBuilder getSnapshotCopyOrBuilder();

      // optional uint32 createdListSize = 6;
      /**
       * <code>optional uint32 createdListSize = 6;</code>
       */
      boolean hasCreatedListSize();
      /**
       * <code>optional uint32 createdListSize = 6;</code>
       */
      int getCreatedListSize();

      // repeated uint64 deletedINode = 7 [packed = true];
      /**
       * <code>repeated uint64 deletedINode = 7 [packed = true];</code>
       *
       * <pre>
       * id of deleted inodes
       * </pre>
       */
      java.util.List<java.lang.Long> getDeletedINodeList();
      /**
       * <code>repeated uint64 deletedINode = 7 [packed = true];</code>
       *
       * <pre>
       * id of deleted inodes
       * </pre>
       */
      int getDeletedINodeCount();
      /**
       * <code>repeated uint64 deletedINode = 7 [packed = true];</code>
       *
       * <pre>
       * id of deleted inodes
       * </pre>
       */
      long getDeletedINode(int index);

      // repeated uint32 deletedINodeRef = 8 [packed = true];
      /**
       * <code>repeated uint32 deletedINodeRef = 8 [packed = true];</code>
       *
       * <pre>
       * id of reference nodes in the deleted list
       * </pre>
       */
      java.util.List<java.lang.Integer> getDeletedINodeRefList();
      /**
       * <code>repeated uint32 deletedINodeRef = 8 [packed = true];</code>
       *
       * <pre>
       * id of reference nodes in the deleted list
       * </pre>
       */
      int getDeletedINodeRefCount();
      /**
       * <code>repeated uint32 deletedINodeRef = 8 [packed = true];</code>
       *
       * <pre>
       * id of reference nodes in the deleted list
       * </pre>
       */
      int getDeletedINodeRef(int index);
    }
13814        /**
13815         * Protobuf type {@code hadoop.hdfs.fsimage.SnapshotDiffSection.DirectoryDiff}
13816         */
13817        public static final class DirectoryDiff extends
13818            com.google.protobuf.GeneratedMessage
13819            implements DirectoryDiffOrBuilder {
      // Use DirectoryDiff.newBuilder() to construct.
      private DirectoryDiff(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
        super(builder);
        // carry over any unknown fields the builder accumulated
        this.unknownFields = builder.getUnknownFields();
      }
13825          private DirectoryDiff(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
13826    
      // Shared immutable instance with all fields at their defaults;
      // initialized in the outer class's static initializer (outside this view).
      private static final DirectoryDiff defaultInstance;
      /** Returns the shared default (all-fields-unset) instance. */
      public static DirectoryDiff getDefaultInstance() {
        return defaultInstance;
      }

      /** Instance-side accessor for the same shared default instance. */
      public DirectoryDiff getDefaultInstanceForType() {
        return defaultInstance;
      }
13835    
      // Fields present on the wire but not defined in this message's schema;
      // preserved so they can be written back out verbatim.
      private final com.google.protobuf.UnknownFieldSet unknownFields;
      @java.lang.Override
      public final com.google.protobuf.UnknownFieldSet
          getUnknownFields() {
        return this.unknownFields;
      }
      /**
       * Parses a serialized DirectoryDiff from the wire format.  Invoked only
       * via PARSER; on malformed input the partially parsed message is attached
       * to the thrown exception through setUnfinishedMessage.
       */
      private DirectoryDiff(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        initFields();
        // Tracks which repeated-field lists have been promoted from the shared
        // immutable empty list to a private mutable ArrayList during this parse.
        int mutable_bitField0_ = 0;
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
            com.google.protobuf.UnknownFieldSet.newBuilder();
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            // Each case label is (fieldNumber << 3) | wireType.  The default
            // case is placed before the numbered cases; Java switch semantics
            // make label order irrelevant.
            switch (tag) {
              case 0:
                // Tag 0 marks end of stream / end of enclosing message.
                done = true;
                break;
              default: {
                if (!parseUnknownField(input, unknownFields,
                                       extensionRegistry, tag)) {
                  done = true;
                }
                break;
              }
              case 8: {
                bitField0_ |= 0x00000001;
                snapshotId_ = input.readUInt32();
                break;
              }
              case 16: {
                bitField0_ |= 0x00000002;
                childrenSize_ = input.readUInt32();
                break;
              }
              case 24: {
                bitField0_ |= 0x00000004;
                isSnapshotRoot_ = input.readBool();
                break;
              }
              case 34: {
                bitField0_ |= 0x00000008;
                name_ = input.readBytes();
                break;
              }
              case 42: {
                // If snapshotCopy was already seen, merge the new occurrence
                // into it (protobuf last-message-merges semantics).
                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory.Builder subBuilder = null;
                if (((bitField0_ & 0x00000010) == 0x00000010)) {
                  subBuilder = snapshotCopy_.toBuilder();
                }
                snapshotCopy_ = input.readMessage(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory.PARSER, extensionRegistry);
                if (subBuilder != null) {
                  subBuilder.mergeFrom(snapshotCopy_);
                  snapshotCopy_ = subBuilder.buildPartial();
                }
                bitField0_ |= 0x00000010;
                break;
              }
              case 48: {
                bitField0_ |= 0x00000020;
                createdListSize_ = input.readUInt32();
                break;
              }
              // Field 7 (deletedINode): case 56 is the unpacked encoding
              // (one varint per tag), case 58 the packed encoding (one
              // length-delimited run of varints).  Both must be accepted.
              case 56: {
                if (!((mutable_bitField0_ & 0x00000040) == 0x00000040)) {
                  deletedINode_ = new java.util.ArrayList<java.lang.Long>();
                  mutable_bitField0_ |= 0x00000040;
                }
                deletedINode_.add(input.readUInt64());
                break;
              }
              case 58: {
                int length = input.readRawVarint32();
                int limit = input.pushLimit(length);
                if (!((mutable_bitField0_ & 0x00000040) == 0x00000040) && input.getBytesUntilLimit() > 0) {
                  deletedINode_ = new java.util.ArrayList<java.lang.Long>();
                  mutable_bitField0_ |= 0x00000040;
                }
                while (input.getBytesUntilLimit() > 0) {
                  deletedINode_.add(input.readUInt64());
                }
                input.popLimit(limit);
                break;
              }
              // Field 8 (deletedINodeRef): unpacked (64) and packed (66) forms.
              case 64: {
                if (!((mutable_bitField0_ & 0x00000080) == 0x00000080)) {
                  deletedINodeRef_ = new java.util.ArrayList<java.lang.Integer>();
                  mutable_bitField0_ |= 0x00000080;
                }
                deletedINodeRef_.add(input.readUInt32());
                break;
              }
              case 66: {
                int length = input.readRawVarint32();
                int limit = input.pushLimit(length);
                if (!((mutable_bitField0_ & 0x00000080) == 0x00000080) && input.getBytesUntilLimit() > 0) {
                  deletedINodeRef_ = new java.util.ArrayList<java.lang.Integer>();
                  mutable_bitField0_ |= 0x00000080;
                }
                while (input.getBytesUntilLimit() > 0) {
                  deletedINodeRef_.add(input.readUInt32());
                }
                input.popLimit(limit);
                break;
              }
            }
          }
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(this);
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(
              e.getMessage()).setUnfinishedMessage(this);
        } finally {
          // Runs even on parse failure so the partially built message attached
          // to the exception is still immutable and internally consistent.
          if (((mutable_bitField0_ & 0x00000040) == 0x00000040)) {
            deletedINode_ = java.util.Collections.unmodifiableList(deletedINode_);
          }
          if (((mutable_bitField0_ & 0x00000080) == 0x00000080)) {
            deletedINodeRef_ = java.util.Collections.unmodifiableList(deletedINodeRef_);
          }
          this.unknownFields = unknownFields.build();
          makeExtensionsImmutable();
        }
      }
      /** Returns the descriptor for this message type (reflection metadata). */
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_DirectoryDiff_descriptor;
      }

      // Binds the shared field-accessor table to this message and its Builder
      // so GeneratedMessage reflection can get/set fields by descriptor.
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_DirectoryDiff_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DirectoryDiff.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DirectoryDiff.Builder.class);
      }
13974    
      // NOTE(review): PARSER is public, static and non-final, so it is
      // technically reassignable by callers.  This is how protoc 2.5 emitted
      // generated code (later protobuf versions made it final/private); do not
      // change it here without regenerating from fsimage.proto.
      public static com.google.protobuf.Parser<DirectoryDiff> PARSER =
          new com.google.protobuf.AbstractParser<DirectoryDiff>() {
        public DirectoryDiff parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          return new DirectoryDiff(input, extensionRegistry);
        }
      };

      @java.lang.Override
      public com.google.protobuf.Parser<DirectoryDiff> getParserForType() {
        return PARSER;
      }
13989    
13990          private int bitField0_;
13991          // optional uint32 snapshotId = 1;
13992          public static final int SNAPSHOTID_FIELD_NUMBER = 1;
13993          private int snapshotId_;
13994          /**
13995           * <code>optional uint32 snapshotId = 1;</code>
13996           */
13997          public boolean hasSnapshotId() {
13998            return ((bitField0_ & 0x00000001) == 0x00000001);
13999          }
14000          /**
14001           * <code>optional uint32 snapshotId = 1;</code>
14002           */
14003          public int getSnapshotId() {
14004            return snapshotId_;
14005          }
14006    
14007          // optional uint32 childrenSize = 2;
14008          public static final int CHILDRENSIZE_FIELD_NUMBER = 2;
14009          private int childrenSize_;
14010          /**
14011           * <code>optional uint32 childrenSize = 2;</code>
14012           */
14013          public boolean hasChildrenSize() {
14014            return ((bitField0_ & 0x00000002) == 0x00000002);
14015          }
14016          /**
14017           * <code>optional uint32 childrenSize = 2;</code>
14018           */
14019          public int getChildrenSize() {
14020            return childrenSize_;
14021          }
14022    
14023          // optional bool isSnapshotRoot = 3;
14024          public static final int ISSNAPSHOTROOT_FIELD_NUMBER = 3;
14025          private boolean isSnapshotRoot_;
14026          /**
14027           * <code>optional bool isSnapshotRoot = 3;</code>
14028           */
14029          public boolean hasIsSnapshotRoot() {
14030            return ((bitField0_ & 0x00000004) == 0x00000004);
14031          }
14032          /**
14033           * <code>optional bool isSnapshotRoot = 3;</code>
14034           */
14035          public boolean getIsSnapshotRoot() {
14036            return isSnapshotRoot_;
14037          }
14038    
      // optional bytes name = 4;
      public static final int NAME_FIELD_NUMBER = 4;
      private com.google.protobuf.ByteString name_;
      /**
       * Whether the name field was explicitly set.
       * <code>optional bytes name = 4;</code>
       */
      public boolean hasName() {
        return ((bitField0_ & 0x00000008) == 0x00000008);
      }
      /**
       * The raw directory-name bytes; ByteString.EMPTY when unset.
       * <code>optional bytes name = 4;</code>
       */
      public com.google.protobuf.ByteString getName() {
        return name_;
      }
14054    
      // optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory snapshotCopy = 5;
      public static final int SNAPSHOTCOPY_FIELD_NUMBER = 5;
      private org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory snapshotCopy_;
      /**
       * Whether the snapshotCopy field was explicitly set.
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory snapshotCopy = 5;</code>
       */
      public boolean hasSnapshotCopy() {
        return ((bitField0_ & 0x00000010) == 0x00000010);
      }
      /**
       * The snapshotCopy message; the type's default instance when unset
       * (initFields sets it to getDefaultInstance(), never null).
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory snapshotCopy = 5;</code>
       */
      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory getSnapshotCopy() {
        return snapshotCopy_;
      }
      /**
       * Same instance viewed through the OrBuilder interface.
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory snapshotCopy = 5;</code>
       */
      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectoryOrBuilder getSnapshotCopyOrBuilder() {
        return snapshotCopy_;
      }
14076    
      // optional uint32 createdListSize = 6;
      public static final int CREATEDLISTSIZE_FIELD_NUMBER = 6;
      private int createdListSize_;
      /**
       * Whether the createdListSize field was explicitly set.
       * <code>optional uint32 createdListSize = 6;</code>
       */
      public boolean hasCreatedListSize() {
        return ((bitField0_ & 0x00000020) == 0x00000020);
      }
      /**
       * The createdListSize value, or 0 when unset.
       * <code>optional uint32 createdListSize = 6;</code>
       */
      public int getCreatedListSize() {
        return createdListSize_;
      }
14092    
      // repeated uint64 deletedINode = 7 [packed = true];
      public static final int DELETEDINODE_FIELD_NUMBER = 7;
      // Unmodifiable after construction (Collections.emptyList or wrapped in
      // unmodifiableList by the parse constructor / builder).
      private java.util.List<java.lang.Long> deletedINode_;
      /**
       * <code>repeated uint64 deletedINode = 7 [packed = true];</code>
       *
       * <pre>
       * id of deleted inodes
       * </pre>
       */
      public java.util.List<java.lang.Long>
          getDeletedINodeList() {
        return deletedINode_;
      }
      /**
       * Number of deletedINode entries.
       * <code>repeated uint64 deletedINode = 7 [packed = true];</code>
       *
       * <pre>
       * id of deleted inodes
       * </pre>
       */
      public int getDeletedINodeCount() {
        return deletedINode_.size();
      }
      /**
       * The deletedINode entry at the given index.
       * <code>repeated uint64 deletedINode = 7 [packed = true];</code>
       *
       * <pre>
       * id of deleted inodes
       * </pre>
       */
      public long getDeletedINode(int index) {
        return deletedINode_.get(index);
      }
      // Byte size of the packed payload; computed by getSerializedSize() and
      // later consumed by writeTo() when emitting the length prefix.
      private int deletedINodeMemoizedSerializedSize = -1;
14128    
      // repeated uint32 deletedINodeRef = 8 [packed = true];
      public static final int DELETEDINODEREF_FIELD_NUMBER = 8;
      // Unmodifiable after construction, mirroring deletedINode_.
      private java.util.List<java.lang.Integer> deletedINodeRef_;
      /**
       * <code>repeated uint32 deletedINodeRef = 8 [packed = true];</code>
       *
       * <pre>
       * id of reference nodes in the deleted list
       * </pre>
       */
      public java.util.List<java.lang.Integer>
          getDeletedINodeRefList() {
        return deletedINodeRef_;
      }
      /**
       * Number of deletedINodeRef entries.
       * <code>repeated uint32 deletedINodeRef = 8 [packed = true];</code>
       *
       * <pre>
       * id of reference nodes in the deleted list
       * </pre>
       */
      public int getDeletedINodeRefCount() {
        return deletedINodeRef_.size();
      }
      /**
       * The deletedINodeRef entry at the given index.
       * <code>repeated uint32 deletedINodeRef = 8 [packed = true];</code>
       *
       * <pre>
       * id of reference nodes in the deleted list
       * </pre>
       */
      public int getDeletedINodeRef(int index) {
        return deletedINodeRef_.get(index);
      }
      // Byte size of the packed payload; computed by getSerializedSize() and
      // later consumed by writeTo() when emitting the length prefix.
      private int deletedINodeRefMemoizedSerializedSize = -1;
14164    
      // Resets every field to its protobuf default; called by the parsing
      // constructor before any wire data is read.
      private void initFields() {
        snapshotId_ = 0;
        childrenSize_ = 0;
        isSnapshotRoot_ = false;
        name_ = com.google.protobuf.ByteString.EMPTY;
        snapshotCopy_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory.getDefaultInstance();
        createdListSize_ = 0;
        deletedINode_ = java.util.Collections.emptyList();
        deletedINodeRef_ = java.util.Collections.emptyList();
      }
      // Memoized result of isInitialized(): -1 = not computed, 1 = true.
      private byte memoizedIsInitialized = -1;
      /**
       * Always true: every field of DirectoryDiff is optional or repeated, so
       * there are no required fields to check.
       */
      public final boolean isInitialized() {
        byte isInitialized = memoizedIsInitialized;
        if (isInitialized != -1) return isInitialized == 1;

        memoizedIsInitialized = 1;
        return true;
      }
14183    
      /**
       * Serializes this message to the wire format.  Optional fields are
       * emitted only when their has-bit is set; the two repeated fields are
       * written packed (tag, byte length, then raw varints).
       */
      public void writeTo(com.google.protobuf.CodedOutputStream output)
                          throws java.io.IOException {
        // Side effect matters: populates deletedINodeMemoizedSerializedSize
        // and deletedINodeRefMemoizedSerializedSize used below.
        getSerializedSize();
        if (((bitField0_ & 0x00000001) == 0x00000001)) {
          output.writeUInt32(1, snapshotId_);
        }
        if (((bitField0_ & 0x00000002) == 0x00000002)) {
          output.writeUInt32(2, childrenSize_);
        }
        if (((bitField0_ & 0x00000004) == 0x00000004)) {
          output.writeBool(3, isSnapshotRoot_);
        }
        if (((bitField0_ & 0x00000008) == 0x00000008)) {
          output.writeBytes(4, name_);
        }
        if (((bitField0_ & 0x00000010) == 0x00000010)) {
          output.writeMessage(5, snapshotCopy_);
        }
        if (((bitField0_ & 0x00000020) == 0x00000020)) {
          output.writeUInt32(6, createdListSize_);
        }
        // 58 == (field 7 << 3) | wiretype 2 (length-delimited / packed).
        if (getDeletedINodeList().size() > 0) {
          output.writeRawVarint32(58);
          output.writeRawVarint32(deletedINodeMemoizedSerializedSize);
        }
        for (int i = 0; i < deletedINode_.size(); i++) {
          output.writeUInt64NoTag(deletedINode_.get(i));
        }
        // 66 == (field 8 << 3) | wiretype 2.
        if (getDeletedINodeRefList().size() > 0) {
          output.writeRawVarint32(66);
          output.writeRawVarint32(deletedINodeRefMemoizedSerializedSize);
        }
        for (int i = 0; i < deletedINodeRef_.size(); i++) {
          output.writeUInt32NoTag(deletedINodeRef_.get(i));
        }
        getUnknownFields().writeTo(output);
      }
14221    
      // Memoized total size in bytes; -1 until first computed.  Safe to cache
      // because the message is immutable after construction.
      private int memoizedSerializedSize = -1;
      /**
       * Computes (and memoizes) the serialized size in bytes.  Also records
       * the packed payload sizes of the two repeated fields, which writeTo()
       * re-uses as length prefixes.
       */
      public int getSerializedSize() {
        int size = memoizedSerializedSize;
        if (size != -1) return size;

        size = 0;
        if (((bitField0_ & 0x00000001) == 0x00000001)) {
          size += com.google.protobuf.CodedOutputStream
            .computeUInt32Size(1, snapshotId_);
        }
        if (((bitField0_ & 0x00000002) == 0x00000002)) {
          size += com.google.protobuf.CodedOutputStream
            .computeUInt32Size(2, childrenSize_);
        }
        if (((bitField0_ & 0x00000004) == 0x00000004)) {
          size += com.google.protobuf.CodedOutputStream
            .computeBoolSize(3, isSnapshotRoot_);
        }
        if (((bitField0_ & 0x00000008) == 0x00000008)) {
          size += com.google.protobuf.CodedOutputStream
            .computeBytesSize(4, name_);
        }
        if (((bitField0_ & 0x00000010) == 0x00000010)) {
          size += com.google.protobuf.CodedOutputStream
            .computeMessageSize(5, snapshotCopy_);
        }
        if (((bitField0_ & 0x00000020) == 0x00000020)) {
          size += com.google.protobuf.CodedOutputStream
            .computeUInt32Size(6, createdListSize_);
        }
        // Packed field 7: payload bytes plus, when non-empty, a 1-byte tag
        // and a varint length prefix.
        {
          int dataSize = 0;
          for (int i = 0; i < deletedINode_.size(); i++) {
            dataSize += com.google.protobuf.CodedOutputStream
              .computeUInt64SizeNoTag(deletedINode_.get(i));
          }
          size += dataSize;
          if (!getDeletedINodeList().isEmpty()) {
            size += 1;
            size += com.google.protobuf.CodedOutputStream
                .computeInt32SizeNoTag(dataSize);
          }
          deletedINodeMemoizedSerializedSize = dataSize;
        }
        // Packed field 8, same scheme as field 7.
        {
          int dataSize = 0;
          for (int i = 0; i < deletedINodeRef_.size(); i++) {
            dataSize += com.google.protobuf.CodedOutputStream
              .computeUInt32SizeNoTag(deletedINodeRef_.get(i));
          }
          size += dataSize;
          if (!getDeletedINodeRefList().isEmpty()) {
            size += 1;
            size += com.google.protobuf.CodedOutputStream
                .computeInt32SizeNoTag(dataSize);
          }
          deletedINodeRefMemoizedSerializedSize = dataSize;
        }
        size += getUnknownFields().getSerializedSize();
        memoizedSerializedSize = size;
        return size;
      }
14284    
      private static final long serialVersionUID = 0L;
      // Java-serialization hook: delegates to GeneratedMessage, which replaces
      // the object with a compact serialized proxy on the stream.
      @java.lang.Override
      protected java.lang.Object writeReplace()
          throws java.io.ObjectStreamException {
        return super.writeReplace();
      }
14291    
      // Static parsing entry points.  All overloads delegate to PARSER; the
      // extensionRegistry variants resolve extensions while parsing, and the
      // parseDelimitedFrom variants expect a varint length prefix before the
      // message bytes.
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DirectoryDiff parseFrom(
          com.google.protobuf.ByteString data)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DirectoryDiff parseFrom(
          com.google.protobuf.ByteString data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DirectoryDiff parseFrom(byte[] data)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DirectoryDiff parseFrom(
          byte[] data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DirectoryDiff parseFrom(java.io.InputStream input)
          throws java.io.IOException {
        return PARSER.parseFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DirectoryDiff parseFrom(
          java.io.InputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseFrom(input, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DirectoryDiff parseDelimitedFrom(java.io.InputStream input)
          throws java.io.IOException {
        return PARSER.parseDelimitedFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DirectoryDiff parseDelimitedFrom(
          java.io.InputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseDelimitedFrom(input, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DirectoryDiff parseFrom(
          com.google.protobuf.CodedInputStream input)
          throws java.io.IOException {
        return PARSER.parseFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DirectoryDiff parseFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseFrom(input, extensionRegistry);
      }
14344    
      /** Creates a new empty Builder. */
      public static Builder newBuilder() { return Builder.create(); }
      public Builder newBuilderForType() { return newBuilder(); }
      /** Creates a Builder pre-populated with the given prototype's fields. */
      public static Builder newBuilder(org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DirectoryDiff prototype) {
        return newBuilder().mergeFrom(prototype);
      }
      /** Creates a Builder pre-populated with this message's fields. */
      public Builder toBuilder() { return newBuilder(this); }

      // Framework hook: builds a Builder attached to a parent so nested-builder
      // changes propagate invalidation upward.
      @java.lang.Override
      protected Builder newBuilderForType(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        Builder builder = new Builder(parent);
        return builder;
      }
14358          /**
14359           * Protobuf type {@code hadoop.hdfs.fsimage.SnapshotDiffSection.DirectoryDiff}
14360           */
14361          public static final class Builder extends
14362              com.google.protobuf.GeneratedMessage.Builder<Builder>
14363             implements org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DirectoryDiffOrBuilder {
        /** Returns the descriptor for DirectoryDiff (shared with the message class). */
        public static final com.google.protobuf.Descriptors.Descriptor
            getDescriptor() {
          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_DirectoryDiff_descriptor;
        }

        // Same accessor table as the message class; required by the
        // GeneratedMessage.Builder reflection machinery.
        protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
            internalGetFieldAccessorTable() {
          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_DirectoryDiff_fieldAccessorTable
              .ensureFieldAccessorsInitialized(
                  org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DirectoryDiff.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DirectoryDiff.Builder.class);
        }
14375    
        // Construct using org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DirectoryDiff.newBuilder()
        private Builder() {
          maybeForceBuilderInitialization();
        }

        // Parent-attached constructor used by nested-builder plumbing.
        private Builder(
            com.google.protobuf.GeneratedMessage.BuilderParent parent) {
          super(parent);
          maybeForceBuilderInitialization();
        }
        // Eagerly creates the snapshotCopy sub-builder when the runtime is
        // configured to always use field builders (nested-builder mode).
        private void maybeForceBuilderInitialization() {
          if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
            getSnapshotCopyFieldBuilder();
          }
        }
        private static Builder create() {
          return new Builder();
        }
14394    
        /**
         * Resets every field to its default and clears all has-bits,
         * returning this builder for chaining.
         */
        public Builder clear() {
          super.clear();
          snapshotId_ = 0;
          bitField0_ = (bitField0_ & ~0x00000001);
          childrenSize_ = 0;
          bitField0_ = (bitField0_ & ~0x00000002);
          isSnapshotRoot_ = false;
          bitField0_ = (bitField0_ & ~0x00000004);
          name_ = com.google.protobuf.ByteString.EMPTY;
          bitField0_ = (bitField0_ & ~0x00000008);
          // snapshotCopy lives either in the plain field or in a sub-builder,
          // depending on whether nested builders are in use.
          if (snapshotCopyBuilder_ == null) {
            snapshotCopy_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory.getDefaultInstance();
          } else {
            snapshotCopyBuilder_.clear();
          }
          bitField0_ = (bitField0_ & ~0x00000010);
          createdListSize_ = 0;
          bitField0_ = (bitField0_ & ~0x00000020);
          deletedINode_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000040);
          deletedINodeRef_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000080);
          return this;
        }
14419    
        /** Deep copy: a fresh builder seeded from this builder's current state. */
        public Builder clone() {
          return create().mergeFrom(buildPartial());
        }

        public com.google.protobuf.Descriptors.Descriptor
            getDescriptorForType() {
          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_DirectoryDiff_descriptor;
        }

        /** Returns the shared default DirectoryDiff instance. */
        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DirectoryDiff getDefaultInstanceForType() {
          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DirectoryDiff.getDefaultInstance();
        }
14432    
        /**
         * Builds the message, throwing if it is not fully initialized.
         * (DirectoryDiff has no required fields, so isInitialized() is always
         * true and the throw branch is effectively unreachable here.)
         */
        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DirectoryDiff build() {
          org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DirectoryDiff result = buildPartial();
          if (!result.isInitialized()) {
            throw newUninitializedMessageException(result);
          }
          return result;
        }
14440    
        /**
         * Builds the message without the initialization check, copying each
         * set field and its has-bit from builder state into the new instance.
         */
        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DirectoryDiff buildPartial() {
          org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DirectoryDiff result = new org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DirectoryDiff(this);
          int from_bitField0_ = bitField0_;
          int to_bitField0_ = 0;
          if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
            to_bitField0_ |= 0x00000001;
          }
          result.snapshotId_ = snapshotId_;
          if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
            to_bitField0_ |= 0x00000002;
          }
          result.childrenSize_ = childrenSize_;
          if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
            to_bitField0_ |= 0x00000004;
          }
          result.isSnapshotRoot_ = isSnapshotRoot_;
          if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
            to_bitField0_ |= 0x00000008;
          }
          result.name_ = name_;
          if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
            to_bitField0_ |= 0x00000010;
          }
          if (snapshotCopyBuilder_ == null) {
            result.snapshotCopy_ = snapshotCopy_;
          } else {
            result.snapshotCopy_ = snapshotCopyBuilder_.build();
          }
          if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
            to_bitField0_ |= 0x00000020;
          }
          result.createdListSize_ = createdListSize_;
          // Ownership transfer for the repeated fields: freeze the list, hand
          // it to the message, and clear the "mutable" bit so a later builder
          // mutation allocates a fresh list instead of modifying this one.
          if (((bitField0_ & 0x00000040) == 0x00000040)) {
            deletedINode_ = java.util.Collections.unmodifiableList(deletedINode_);
            bitField0_ = (bitField0_ & ~0x00000040);
          }
          result.deletedINode_ = deletedINode_;
          if (((bitField0_ & 0x00000080) == 0x00000080)) {
            deletedINodeRef_ = java.util.Collections.unmodifiableList(deletedINodeRef_);
            bitField0_ = (bitField0_ & ~0x00000080);
          }
          result.deletedINodeRef_ = deletedINodeRef_;
          result.bitField0_ = to_bitField0_;
          onBuilt();
          return result;
        }
14487    
14488            public Builder mergeFrom(com.google.protobuf.Message other) {
14489              if (other instanceof org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DirectoryDiff) {
14490                return mergeFrom((org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DirectoryDiff)other);
14491              } else {
14492                super.mergeFrom(other);
14493                return this;
14494              }
14495            }
14496    
        /**
         * Merges a DirectoryDiff into this builder: each optional field of
         * {@code other} that is set overwrites (or, for the message field,
         * merges into) this builder's value; repeated fields are appended.
         */
        public Builder mergeFrom(org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DirectoryDiff other) {
          // Merging the default instance is a no-op.
          if (other == org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DirectoryDiff.getDefaultInstance()) return this;
          if (other.hasSnapshotId()) {
            setSnapshotId(other.getSnapshotId());
          }
          if (other.hasChildrenSize()) {
            setChildrenSize(other.getChildrenSize());
          }
          if (other.hasIsSnapshotRoot()) {
            setIsSnapshotRoot(other.getIsSnapshotRoot());
          }
          if (other.hasName()) {
            setName(other.getName());
          }
          if (other.hasSnapshotCopy()) {
            mergeSnapshotCopy(other.getSnapshotCopy());
          }
          if (other.hasCreatedListSize()) {
            setCreatedListSize(other.getCreatedListSize());
          }
          if (!other.deletedINode_.isEmpty()) {
            if (deletedINode_.isEmpty()) {
              // Our list is empty: share other's list and clear the mutability
              // bit so a later write copies before mutating (copy-on-write).
              deletedINode_ = other.deletedINode_;
              bitField0_ = (bitField0_ & ~0x00000040);
            } else {
              ensureDeletedINodeIsMutable();
              deletedINode_.addAll(other.deletedINode_);
            }
            onChanged();
          }
          if (!other.deletedINodeRef_.isEmpty()) {
            if (deletedINodeRef_.isEmpty()) {
              // Same copy-on-write sharing as deletedINode_ above.
              deletedINodeRef_ = other.deletedINodeRef_;
              bitField0_ = (bitField0_ & ~0x00000080);
            } else {
              ensureDeletedINodeRefIsMutable();
              deletedINodeRef_.addAll(other.deletedINodeRef_);
            }
            onChanged();
          }
          this.mergeUnknownFields(other.getUnknownFields());
          return this;
        }
14540    
        /**
         * Always true: every field of DirectoryDiff is optional or repeated,
         * so any builder state yields a valid message.
         */
        public final boolean isInitialized() {
          return true;
        }
14544    
        /**
         * Parses a DirectoryDiff from the stream and merges it into this builder.
         * On a parse error the partially parsed message (if any) is still merged
         * in the finally block before the exception propagates, so fields read
         * before the failure are not lost.
         *
         * @throws java.io.IOException if parsing fails or the stream errors
         */
        public Builder mergeFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws java.io.IOException {
          org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DirectoryDiff parsedMessage = null;
          try {
            parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            // Recover whatever was parsed before the failure, then rethrow.
            parsedMessage = (org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DirectoryDiff) e.getUnfinishedMessage();
            throw e;
          } finally {
            if (parsedMessage != null) {
              mergeFrom(parsedMessage);
            }
          }
          return this;
        }
        // Tracks which fields are set (one bit per field) and, for repeated
        // fields, whether the backing list is privately owned and mutable.
        private int bitField0_;

        // optional uint32 snapshotId = 1;
        private int snapshotId_ ;
        /**
         * <code>optional uint32 snapshotId = 1;</code>
         */
        public boolean hasSnapshotId() {
          return ((bitField0_ & 0x00000001) == 0x00000001);
        }
        /**
         * <code>optional uint32 snapshotId = 1;</code>
         */
        public int getSnapshotId() {
          return snapshotId_;
        }
        /**
         * <code>optional uint32 snapshotId = 1;</code>
         */
        public Builder setSnapshotId(int value) {
          bitField0_ |= 0x00000001;
          snapshotId_ = value;
          onChanged();
          return this;
        }
        /**
         * <code>optional uint32 snapshotId = 1;</code>
         */
        public Builder clearSnapshotId() {
          bitField0_ = (bitField0_ & ~0x00000001);
          snapshotId_ = 0;
          onChanged();
          return this;
        }
14596    
        // optional uint32 childrenSize = 2;
        private int childrenSize_ ;
        /**
         * <code>optional uint32 childrenSize = 2;</code>
         */
        public boolean hasChildrenSize() {
          return ((bitField0_ & 0x00000002) == 0x00000002);
        }
        /**
         * <code>optional uint32 childrenSize = 2;</code>
         */
        public int getChildrenSize() {
          return childrenSize_;
        }
        /**
         * <code>optional uint32 childrenSize = 2;</code>
         */
        public Builder setChildrenSize(int value) {
          bitField0_ |= 0x00000002;
          childrenSize_ = value;
          onChanged();
          return this;
        }
        /**
         * <code>optional uint32 childrenSize = 2;</code>
         */
        public Builder clearChildrenSize() {
          bitField0_ = (bitField0_ & ~0x00000002);
          childrenSize_ = 0;
          onChanged();
          return this;
        }
14629    
        // optional bool isSnapshotRoot = 3;
        private boolean isSnapshotRoot_ ;
        /**
         * <code>optional bool isSnapshotRoot = 3;</code>
         */
        public boolean hasIsSnapshotRoot() {
          return ((bitField0_ & 0x00000004) == 0x00000004);
        }
        /**
         * <code>optional bool isSnapshotRoot = 3;</code>
         */
        public boolean getIsSnapshotRoot() {
          return isSnapshotRoot_;
        }
        /**
         * <code>optional bool isSnapshotRoot = 3;</code>
         */
        public Builder setIsSnapshotRoot(boolean value) {
          bitField0_ |= 0x00000004;
          isSnapshotRoot_ = value;
          onChanged();
          return this;
        }
        /**
         * <code>optional bool isSnapshotRoot = 3;</code>
         */
        public Builder clearIsSnapshotRoot() {
          bitField0_ = (bitField0_ & ~0x00000004);
          isSnapshotRoot_ = false;
          onChanged();
          return this;
        }
14662    
        // optional bytes name = 4;
        private com.google.protobuf.ByteString name_ = com.google.protobuf.ByteString.EMPTY;
        /**
         * <code>optional bytes name = 4;</code>
         */
        public boolean hasName() {
          return ((bitField0_ & 0x00000008) == 0x00000008);
        }
        /**
         * <code>optional bytes name = 4;</code>
         */
        public com.google.protobuf.ByteString getName() {
          return name_;
        }
        /**
         * <code>optional bytes name = 4;</code>
         *
         * @throws NullPointerException if {@code value} is null (bytes fields
         *         are null-hostile; use {@link #clearName()} to unset)
         */
        public Builder setName(com.google.protobuf.ByteString value) {
          if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000008;
          name_ = value;
          onChanged();
          return this;
        }
        /**
         * <code>optional bytes name = 4;</code>
         */
        public Builder clearName() {
          bitField0_ = (bitField0_ & ~0x00000008);
          name_ = getDefaultInstance().getName();
          onChanged();
          return this;
        }
14698    
        // optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory snapshotCopy = 5;
        // The field is held either directly in snapshotCopy_ or, once
        // getSnapshotCopyBuilder() has been called, inside snapshotCopyBuilder_;
        // exactly one of the two is authoritative at any time.
        private org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory snapshotCopy_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory.getDefaultInstance();
        private com.google.protobuf.SingleFieldBuilder<
            org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectoryOrBuilder> snapshotCopyBuilder_;
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory snapshotCopy = 5;</code>
         */
        public boolean hasSnapshotCopy() {
          return ((bitField0_ & 0x00000010) == 0x00000010);
        }
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory snapshotCopy = 5;</code>
         */
        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory getSnapshotCopy() {
          if (snapshotCopyBuilder_ == null) {
            return snapshotCopy_;
          } else {
            return snapshotCopyBuilder_.getMessage();
          }
        }
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory snapshotCopy = 5;</code>
         */
        public Builder setSnapshotCopy(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory value) {
          if (snapshotCopyBuilder_ == null) {
            if (value == null) {
              throw new NullPointerException();
            }
            snapshotCopy_ = value;
            onChanged();
          } else {
            snapshotCopyBuilder_.setMessage(value);
          }
          bitField0_ |= 0x00000010;
          return this;
        }
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory snapshotCopy = 5;</code>
         */
        public Builder setSnapshotCopy(
            org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory.Builder builderForValue) {
          if (snapshotCopyBuilder_ == null) {
            snapshotCopy_ = builderForValue.build();
            onChanged();
          } else {
            snapshotCopyBuilder_.setMessage(builderForValue.build());
          }
          bitField0_ |= 0x00000010;
          return this;
        }
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory snapshotCopy = 5;</code>
         */
        public Builder mergeSnapshotCopy(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory value) {
          if (snapshotCopyBuilder_ == null) {
            // Merge into the existing value only if one was set and is not the
            // shared default instance; otherwise just adopt the new value.
            if (((bitField0_ & 0x00000010) == 0x00000010) &&
                snapshotCopy_ != org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory.getDefaultInstance()) {
              snapshotCopy_ =
                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory.newBuilder(snapshotCopy_).mergeFrom(value).buildPartial();
            } else {
              snapshotCopy_ = value;
            }
            onChanged();
          } else {
            snapshotCopyBuilder_.mergeFrom(value);
          }
          bitField0_ |= 0x00000010;
          return this;
        }
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory snapshotCopy = 5;</code>
         */
        public Builder clearSnapshotCopy() {
          if (snapshotCopyBuilder_ == null) {
            snapshotCopy_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory.getDefaultInstance();
            onChanged();
          } else {
            snapshotCopyBuilder_.clear();
          }
          bitField0_ = (bitField0_ & ~0x00000010);
          return this;
        }
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory snapshotCopy = 5;</code>
         */
        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory.Builder getSnapshotCopyBuilder() {
          bitField0_ |= 0x00000010;
          onChanged();
          return getSnapshotCopyFieldBuilder().getBuilder();
        }
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory snapshotCopy = 5;</code>
         */
        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectoryOrBuilder getSnapshotCopyOrBuilder() {
          if (snapshotCopyBuilder_ != null) {
            return snapshotCopyBuilder_.getMessageOrBuilder();
          } else {
            return snapshotCopy_;
          }
        }
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory snapshotCopy = 5;</code>
         */
        private com.google.protobuf.SingleFieldBuilder<
            org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectoryOrBuilder> 
            getSnapshotCopyFieldBuilder() {
          // Lazily creates the nested-field builder; once created, ownership of
          // the current value transfers to it and snapshotCopy_ is nulled out.
          if (snapshotCopyBuilder_ == null) {
            snapshotCopyBuilder_ = new com.google.protobuf.SingleFieldBuilder<
                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectoryOrBuilder>(
                    snapshotCopy_,
                    getParentForChildren(),
                    isClean());
            snapshotCopy_ = null;
          }
          return snapshotCopyBuilder_;
        }
14815    
        // optional uint32 createdListSize = 6;
        private int createdListSize_ ;
        /**
         * <code>optional uint32 createdListSize = 6;</code>
         */
        public boolean hasCreatedListSize() {
          return ((bitField0_ & 0x00000020) == 0x00000020);
        }
        /**
         * <code>optional uint32 createdListSize = 6;</code>
         */
        public int getCreatedListSize() {
          return createdListSize_;
        }
        /**
         * <code>optional uint32 createdListSize = 6;</code>
         */
        public Builder setCreatedListSize(int value) {
          bitField0_ |= 0x00000020;
          createdListSize_ = value;
          onChanged();
          return this;
        }
        /**
         * <code>optional uint32 createdListSize = 6;</code>
         */
        public Builder clearCreatedListSize() {
          bitField0_ = (bitField0_ & ~0x00000020);
          createdListSize_ = 0;
          onChanged();
          return this;
        }
14848    
        // repeated uint64 deletedINode = 7 [packed = true];
        private java.util.List<java.lang.Long> deletedINode_ = java.util.Collections.emptyList();
        // Ensures deletedINode_ is a privately owned, mutable list before any
        // write. Bit 0x40 set means "mutable and owned"; when clear, the list
        // may be shared (or immutable), so it is copied first.
        private void ensureDeletedINodeIsMutable() {
          if (!((bitField0_ & 0x00000040) == 0x00000040)) {
            deletedINode_ = new java.util.ArrayList<java.lang.Long>(deletedINode_);
            bitField0_ |= 0x00000040;
           }
        }
        /**
         * <code>repeated uint64 deletedINode = 7 [packed = true];</code>
         *
         * <pre>
         * id of deleted inodes
         * </pre>
         */
        public java.util.List<java.lang.Long>
            getDeletedINodeList() {
          return java.util.Collections.unmodifiableList(deletedINode_);
        }
        /**
         * <code>repeated uint64 deletedINode = 7 [packed = true];</code>
         *
         * <pre>
         * id of deleted inodes
         * </pre>
         */
        public int getDeletedINodeCount() {
          return deletedINode_.size();
        }
        /**
         * <code>repeated uint64 deletedINode = 7 [packed = true];</code>
         *
         * <pre>
         * id of deleted inodes
         * </pre>
         */
        public long getDeletedINode(int index) {
          return deletedINode_.get(index);
        }
        /**
         * <code>repeated uint64 deletedINode = 7 [packed = true];</code>
         *
         * <pre>
         * id of deleted inodes
         * </pre>
         */
        public Builder setDeletedINode(
            int index, long value) {
          ensureDeletedINodeIsMutable();
          deletedINode_.set(index, value);
          onChanged();
          return this;
        }
        /**
         * <code>repeated uint64 deletedINode = 7 [packed = true];</code>
         *
         * <pre>
         * id of deleted inodes
         * </pre>
         */
        public Builder addDeletedINode(long value) {
          ensureDeletedINodeIsMutable();
          deletedINode_.add(value);
          onChanged();
          return this;
        }
        /**
         * <code>repeated uint64 deletedINode = 7 [packed = true];</code>
         *
         * <pre>
         * id of deleted inodes
         * </pre>
         */
        public Builder addAllDeletedINode(
            java.lang.Iterable<? extends java.lang.Long> values) {
          ensureDeletedINodeIsMutable();
          super.addAll(values, deletedINode_);
          onChanged();
          return this;
        }
        /**
         * <code>repeated uint64 deletedINode = 7 [packed = true];</code>
         *
         * <pre>
         * id of deleted inodes
         * </pre>
         */
        public Builder clearDeletedINode() {
          deletedINode_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000040);
          onChanged();
          return this;
        }
14942    
        // repeated uint32 deletedINodeRef = 8 [packed = true];
        private java.util.List<java.lang.Integer> deletedINodeRef_ = java.util.Collections.emptyList();
        // Copy-on-write guard for deletedINodeRef_ (bit 0x80), mirroring
        // ensureDeletedINodeIsMutable() for the deletedINode field.
        private void ensureDeletedINodeRefIsMutable() {
          if (!((bitField0_ & 0x00000080) == 0x00000080)) {
            deletedINodeRef_ = new java.util.ArrayList<java.lang.Integer>(deletedINodeRef_);
            bitField0_ |= 0x00000080;
           }
        }
        /**
         * <code>repeated uint32 deletedINodeRef = 8 [packed = true];</code>
         *
         * <pre>
         * id of reference nodes in the deleted list
         * </pre>
         */
        public java.util.List<java.lang.Integer>
            getDeletedINodeRefList() {
          return java.util.Collections.unmodifiableList(deletedINodeRef_);
        }
        /**
         * <code>repeated uint32 deletedINodeRef = 8 [packed = true];</code>
         *
         * <pre>
         * id of reference nodes in the deleted list
         * </pre>
         */
        public int getDeletedINodeRefCount() {
          return deletedINodeRef_.size();
        }
        /**
         * <code>repeated uint32 deletedINodeRef = 8 [packed = true];</code>
         *
         * <pre>
         * id of reference nodes in the deleted list
         * </pre>
         */
        public int getDeletedINodeRef(int index) {
          return deletedINodeRef_.get(index);
        }
        /**
         * <code>repeated uint32 deletedINodeRef = 8 [packed = true];</code>
         *
         * <pre>
         * id of reference nodes in the deleted list
         * </pre>
         */
        public Builder setDeletedINodeRef(
            int index, int value) {
          ensureDeletedINodeRefIsMutable();
          deletedINodeRef_.set(index, value);
          onChanged();
          return this;
        }
        /**
         * <code>repeated uint32 deletedINodeRef = 8 [packed = true];</code>
         *
         * <pre>
         * id of reference nodes in the deleted list
         * </pre>
         */
        public Builder addDeletedINodeRef(int value) {
          ensureDeletedINodeRefIsMutable();
          deletedINodeRef_.add(value);
          onChanged();
          return this;
        }
        /**
         * <code>repeated uint32 deletedINodeRef = 8 [packed = true];</code>
         *
         * <pre>
         * id of reference nodes in the deleted list
         * </pre>
         */
        public Builder addAllDeletedINodeRef(
            java.lang.Iterable<? extends java.lang.Integer> values) {
          ensureDeletedINodeRefIsMutable();
          super.addAll(values, deletedINodeRef_);
          onChanged();
          return this;
        }
        /**
         * <code>repeated uint32 deletedINodeRef = 8 [packed = true];</code>
         *
         * <pre>
         * id of reference nodes in the deleted list
         * </pre>
         */
        public Builder clearDeletedINodeRef() {
          deletedINodeRef_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000080);
          onChanged();
          return this;
        }
15036    
15037            // @@protoc_insertion_point(builder_scope:hadoop.hdfs.fsimage.SnapshotDiffSection.DirectoryDiff)
15038          }
15039    
      // Eagerly create the singleton default instance used by
      // getDefaultInstance() and as the shared sentinel in merge logic.
      static {
        defaultInstance = new DirectoryDiff(true);
        defaultInstance.initFields();
      }
15044    
15045          // @@protoc_insertion_point(class_scope:hadoop.hdfs.fsimage.SnapshotDiffSection.DirectoryDiff)
15046        }
15047    
    /**
     * Read-only accessor interface for {@code hadoop.hdfs.fsimage.SnapshotDiffSection.FileDiff},
     * implemented by both the message and its builder.
     */
    public interface FileDiffOrBuilder
        extends com.google.protobuf.MessageOrBuilder {

      // optional uint32 snapshotId = 1;
      /**
       * <code>optional uint32 snapshotId = 1;</code>
       */
      boolean hasSnapshotId();
      /**
       * <code>optional uint32 snapshotId = 1;</code>
       */
      int getSnapshotId();

      // optional uint64 fileSize = 2;
      /**
       * <code>optional uint64 fileSize = 2;</code>
       */
      boolean hasFileSize();
      /**
       * <code>optional uint64 fileSize = 2;</code>
       */
      long getFileSize();

      // optional bytes name = 3;
      /**
       * <code>optional bytes name = 3;</code>
       */
      boolean hasName();
      /**
       * <code>optional bytes name = 3;</code>
       */
      com.google.protobuf.ByteString getName();

      // optional .hadoop.hdfs.fsimage.INodeSection.INodeFile snapshotCopy = 4;
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeFile snapshotCopy = 4;</code>
       */
      boolean hasSnapshotCopy();
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeFile snapshotCopy = 4;</code>
       */
      org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile getSnapshotCopy();
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeFile snapshotCopy = 4;</code>
       */
      org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFileOrBuilder getSnapshotCopyOrBuilder();
    }
15095        /**
15096         * Protobuf type {@code hadoop.hdfs.fsimage.SnapshotDiffSection.FileDiff}
15097         */
15098        public static final class FileDiff extends
15099            com.google.protobuf.GeneratedMessage
15100            implements FileDiffOrBuilder {
      // Use FileDiff.newBuilder() to construct.
      private FileDiff(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
        super(builder);
        this.unknownFields = builder.getUnknownFields();
      }
      // No-init constructor used only for the singleton default instance.
      private FileDiff(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

      // Singleton default instance, created in the class's static initializer.
      private static final FileDiff defaultInstance;
      public static FileDiff getDefaultInstance() {
        return defaultInstance;
      }

      public FileDiff getDefaultInstanceForType() {
        return defaultInstance;
      }
15116    
      // Fields from the wire that this generated class does not recognize;
      // preserved so round-tripping does not drop data.
      private final com.google.protobuf.UnknownFieldSet unknownFields;
      @java.lang.Override
      public final com.google.protobuf.UnknownFieldSet
          getUnknownFields() {
        return this.unknownFields;
      }
      /**
       * Wire-format parsing constructor: reads tag/value pairs until EOF
       * (tag 0) or an unparseable unknown field. Note the {@code default:}
       * label appears lexically before the {@code case} labels — switch
       * dispatch is by value, so behavior is unaffected.
       *
       * @throws com.google.protobuf.InvalidProtocolBufferException on malformed
       *         input; the partially parsed message is attached to the exception
       */
      private FileDiff(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        initFields();
        int mutable_bitField0_ = 0;
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
            com.google.protobuf.UnknownFieldSet.newBuilder();
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              default: {
                if (!parseUnknownField(input, unknownFields,
                                       extensionRegistry, tag)) {
                  done = true;
                }
                break;
              }
              case 8: {
                bitField0_ |= 0x00000001;
                snapshotId_ = input.readUInt32();
                break;
              }
              case 16: {
                bitField0_ |= 0x00000002;
                fileSize_ = input.readUInt64();
                break;
              }
              case 26: {
                bitField0_ |= 0x00000004;
                name_ = input.readBytes();
                break;
              }
              case 34: {
                // Message field: if snapshotCopy was already set (repeated
                // occurrence on the wire), merge the new value into it.
                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile.Builder subBuilder = null;
                if (((bitField0_ & 0x00000008) == 0x00000008)) {
                  subBuilder = snapshotCopy_.toBuilder();
                }
                snapshotCopy_ = input.readMessage(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile.PARSER, extensionRegistry);
                if (subBuilder != null) {
                  subBuilder.mergeFrom(snapshotCopy_);
                  snapshotCopy_ = subBuilder.buildPartial();
                }
                bitField0_ |= 0x00000008;
                break;
              }
            }
          }
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(this);
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(
              e.getMessage()).setUnfinishedMessage(this);
        } finally {
          // Always commit whatever unknown fields were collected, even when
          // parsing failed part-way through.
          this.unknownFields = unknownFields.build();
          makeExtensionsImmutable();
        }
      }
      // Returns the proto descriptor for FileDiff, shared by all instances
      // of this message type.
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_FileDiff_descriptor;
      }
15190    
      // Reflection table mapping descriptor fields to the generated
      // accessor methods of FileDiff and its Builder.
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_FileDiff_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.FileDiff.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.FileDiff.Builder.class);
      }
15197    
      // Parser singleton delegating to the package-private parsing
      // constructor. Public and non-final per the protobuf 2.5 generated-code
      // convention; callers should treat it as read-only.
      public static com.google.protobuf.Parser<FileDiff> PARSER =
          new com.google.protobuf.AbstractParser<FileDiff>() {
        public FileDiff parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          return new FileDiff(input, extensionRegistry);
        }
      };
15207    
      // Exposes the shared PARSER instance to the protobuf runtime.
      @java.lang.Override
      public com.google.protobuf.Parser<FileDiff> getParserForType() {
        return PARSER;
      }
15212    
      // Has-bits for the optional fields, one bit per field in field-number
      // order: 0x1=snapshotId, 0x2=fileSize, 0x4=name, 0x8=snapshotCopy.
      private int bitField0_;
      // optional uint32 snapshotId = 1;
      public static final int SNAPSHOTID_FIELD_NUMBER = 1;
      private int snapshotId_;
      /**
       * <code>optional uint32 snapshotId = 1;</code>
       *
       * @return whether the snapshotId field was explicitly set
       */
      public boolean hasSnapshotId() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>optional uint32 snapshotId = 1;</code>
       *
       * @return the field value, or 0 if unset
       */
      public int getSnapshotId() {
        return snapshotId_;
      }
15229    
      // optional uint64 fileSize = 2;
      public static final int FILESIZE_FIELD_NUMBER = 2;
      private long fileSize_;
      /**
       * <code>optional uint64 fileSize = 2;</code>
       *
       * @return whether the fileSize field was explicitly set
       */
      public boolean hasFileSize() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>optional uint64 fileSize = 2;</code>
       *
       * @return the field value, or 0L if unset
       */
      public long getFileSize() {
        return fileSize_;
      }
15245    
      // optional bytes name = 3;
      public static final int NAME_FIELD_NUMBER = 3;
      private com.google.protobuf.ByteString name_;
      /**
       * <code>optional bytes name = 3;</code>
       *
       * @return whether the name field was explicitly set
       */
      public boolean hasName() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      /**
       * <code>optional bytes name = 3;</code>
       *
       * @return the field value, or ByteString.EMPTY if unset (see initFields)
       */
      public com.google.protobuf.ByteString getName() {
        return name_;
      }
15261    
      // optional .hadoop.hdfs.fsimage.INodeSection.INodeFile snapshotCopy = 4;
      public static final int SNAPSHOTCOPY_FIELD_NUMBER = 4;
      private org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile snapshotCopy_;
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeFile snapshotCopy = 4;</code>
       *
       * @return whether the snapshotCopy field was explicitly set
       */
      public boolean hasSnapshotCopy() {
        return ((bitField0_ & 0x00000008) == 0x00000008);
      }
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeFile snapshotCopy = 4;</code>
       *
       * @return the field value, or the INodeFile default instance if unset
       */
      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile getSnapshotCopy() {
        return snapshotCopy_;
      }
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeFile snapshotCopy = 4;</code>
       *
       * <p>On an immutable message this returns the same object as
       * {@link #getSnapshotCopy()}.
       */
      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFileOrBuilder getSnapshotCopyOrBuilder() {
        return snapshotCopy_;
      }
15283    
      // Sets every field to its proto default; called on the eagerly-built
      // default instance (see the static initializer).
      private void initFields() {
        snapshotId_ = 0;
        fileSize_ = 0L;
        name_ = com.google.protobuf.ByteString.EMPTY;
        snapshotCopy_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile.getDefaultInstance();
      }
      // Memoized isInitialized() result: -1 = not yet computed, 0 = false, 1 = true.
      private byte memoizedIsInitialized = -1;
      public final boolean isInitialized() {
        byte isInitialized = memoizedIsInitialized;
        if (isInitialized != -1) return isInitialized == 1;

        // All four FileDiff fields are optional; the only constraint is that
        // a present snapshotCopy must itself be fully initialized.
        if (hasSnapshotCopy()) {
          if (!getSnapshotCopy().isInitialized()) {
            memoizedIsInitialized = 0;
            return false;
          }
        }
        memoizedIsInitialized = 1;
        return true;
      }
15304    
      // Serializes only the fields whose has-bit is set, in field-number
      // order, followed by any unknown fields preserved from parsing.
      public void writeTo(com.google.protobuf.CodedOutputStream output)
                          throws java.io.IOException {
        // Forces size memoization first so nested message sizes are cached.
        getSerializedSize();
        if (((bitField0_ & 0x00000001) == 0x00000001)) {
          output.writeUInt32(1, snapshotId_);
        }
        if (((bitField0_ & 0x00000002) == 0x00000002)) {
          output.writeUInt64(2, fileSize_);
        }
        if (((bitField0_ & 0x00000004) == 0x00000004)) {
          output.writeBytes(3, name_);
        }
        if (((bitField0_ & 0x00000008) == 0x00000008)) {
          output.writeMessage(4, snapshotCopy_);
        }
        getUnknownFields().writeTo(output);
      }
15322    
      // Memoized wire size; -1 until first computed. Safe to cache because
      // the message is immutable once built.
      private int memoizedSerializedSize = -1;
      public int getSerializedSize() {
        int size = memoizedSerializedSize;
        if (size != -1) return size;

        // Sum the encoded size of each present field plus unknown fields.
        size = 0;
        if (((bitField0_ & 0x00000001) == 0x00000001)) {
          size += com.google.protobuf.CodedOutputStream
            .computeUInt32Size(1, snapshotId_);
        }
        if (((bitField0_ & 0x00000002) == 0x00000002)) {
          size += com.google.protobuf.CodedOutputStream
            .computeUInt64Size(2, fileSize_);
        }
        if (((bitField0_ & 0x00000004) == 0x00000004)) {
          size += com.google.protobuf.CodedOutputStream
            .computeBytesSize(3, name_);
        }
        if (((bitField0_ & 0x00000008) == 0x00000008)) {
          size += com.google.protobuf.CodedOutputStream
            .computeMessageSize(4, snapshotCopy_);
        }
        size += getUnknownFields().getSerializedSize();
        memoizedSerializedSize = size;
        return size;
      }
15349    
      private static final long serialVersionUID = 0L;
      // Java serialization substitutes the compact proxy provided by
      // GeneratedMessage instead of serializing this object's fields.
      @java.lang.Override
      protected java.lang.Object writeReplace()
          throws java.io.ObjectStreamException {
        return super.writeReplace();
      }
15356    
      // Static parse entry points; all delegate to PARSER. The
      // parseDelimitedFrom variants read a varint length prefix before the
      // message, matching writeDelimitedTo.
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.FileDiff parseFrom(
          com.google.protobuf.ByteString data)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.FileDiff parseFrom(
          com.google.protobuf.ByteString data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.FileDiff parseFrom(byte[] data)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.FileDiff parseFrom(
          byte[] data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.FileDiff parseFrom(java.io.InputStream input)
          throws java.io.IOException {
        return PARSER.parseFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.FileDiff parseFrom(
          java.io.InputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseFrom(input, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.FileDiff parseDelimitedFrom(java.io.InputStream input)
          throws java.io.IOException {
        return PARSER.parseDelimitedFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.FileDiff parseDelimitedFrom(
          java.io.InputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseDelimitedFrom(input, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.FileDiff parseFrom(
          com.google.protobuf.CodedInputStream input)
          throws java.io.IOException {
        return PARSER.parseFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.FileDiff parseFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseFrom(input, extensionRegistry);
      }
15409    
      // Builder factories.
      public static Builder newBuilder() { return Builder.create(); }
      public Builder newBuilderForType() { return newBuilder(); }
      // Returns a fresh builder pre-populated with the prototype's fields.
      public static Builder newBuilder(org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.FileDiff prototype) {
        return newBuilder().mergeFrom(prototype);
      }
      public Builder toBuilder() { return newBuilder(this); }

      // Creates a builder attached to a parent, used by nested-message
      // field builders to propagate change notifications.
      @java.lang.Override
      protected Builder newBuilderForType(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        Builder builder = new Builder(parent);
        return builder;
      }
15423          /**
15424           * Protobuf type {@code hadoop.hdfs.fsimage.SnapshotDiffSection.FileDiff}
15425           */
15426          public static final class Builder extends
15427              com.google.protobuf.GeneratedMessage.Builder<Builder>
15428             implements org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.FileDiffOrBuilder {
15429            public static final com.google.protobuf.Descriptors.Descriptor
15430                getDescriptor() {
15431              return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_FileDiff_descriptor;
15432            }
15433    
15434            protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
15435                internalGetFieldAccessorTable() {
15436              return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_FileDiff_fieldAccessorTable
15437                  .ensureFieldAccessorsInitialized(
15438                      org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.FileDiff.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.FileDiff.Builder.class);
15439            }
15440    
15441            // Construct using org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.FileDiff.newBuilder()
15442            private Builder() {
15443              maybeForceBuilderInitialization();
15444            }
15445    
15446            private Builder(
15447                com.google.protobuf.GeneratedMessage.BuilderParent parent) {
15448              super(parent);
15449              maybeForceBuilderInitialization();
15450            }
15451            private void maybeForceBuilderInitialization() {
15452              if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
15453                getSnapshotCopyFieldBuilder();
15454              }
15455            }
15456            private static Builder create() {
15457              return new Builder();
15458            }
15459    
15460            public Builder clear() {
15461              super.clear();
15462              snapshotId_ = 0;
15463              bitField0_ = (bitField0_ & ~0x00000001);
15464              fileSize_ = 0L;
15465              bitField0_ = (bitField0_ & ~0x00000002);
15466              name_ = com.google.protobuf.ByteString.EMPTY;
15467              bitField0_ = (bitField0_ & ~0x00000004);
15468              if (snapshotCopyBuilder_ == null) {
15469                snapshotCopy_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile.getDefaultInstance();
15470              } else {
15471                snapshotCopyBuilder_.clear();
15472              }
15473              bitField0_ = (bitField0_ & ~0x00000008);
15474              return this;
15475            }
15476    
15477            public Builder clone() {
15478              return create().mergeFrom(buildPartial());
15479            }
15480    
15481            public com.google.protobuf.Descriptors.Descriptor
15482                getDescriptorForType() {
15483              return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_FileDiff_descriptor;
15484            }
15485    
15486            public org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.FileDiff getDefaultInstanceForType() {
15487              return org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.FileDiff.getDefaultInstance();
15488            }
15489    
15490            public org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.FileDiff build() {
15491              org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.FileDiff result = buildPartial();
15492              if (!result.isInitialized()) {
15493                throw newUninitializedMessageException(result);
15494              }
15495              return result;
15496            }
15497    
15498            public org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.FileDiff buildPartial() {
15499              org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.FileDiff result = new org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.FileDiff(this);
15500              int from_bitField0_ = bitField0_;
15501              int to_bitField0_ = 0;
15502              if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
15503                to_bitField0_ |= 0x00000001;
15504              }
15505              result.snapshotId_ = snapshotId_;
15506              if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
15507                to_bitField0_ |= 0x00000002;
15508              }
15509              result.fileSize_ = fileSize_;
15510              if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
15511                to_bitField0_ |= 0x00000004;
15512              }
15513              result.name_ = name_;
15514              if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
15515                to_bitField0_ |= 0x00000008;
15516              }
15517              if (snapshotCopyBuilder_ == null) {
15518                result.snapshotCopy_ = snapshotCopy_;
15519              } else {
15520                result.snapshotCopy_ = snapshotCopyBuilder_.build();
15521              }
15522              result.bitField0_ = to_bitField0_;
15523              onBuilt();
15524              return result;
15525            }
15526    
15527            public Builder mergeFrom(com.google.protobuf.Message other) {
15528              if (other instanceof org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.FileDiff) {
15529                return mergeFrom((org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.FileDiff)other);
15530              } else {
15531                super.mergeFrom(other);
15532                return this;
15533              }
15534            }
15535    
15536            public Builder mergeFrom(org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.FileDiff other) {
15537              if (other == org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.FileDiff.getDefaultInstance()) return this;
15538              if (other.hasSnapshotId()) {
15539                setSnapshotId(other.getSnapshotId());
15540              }
15541              if (other.hasFileSize()) {
15542                setFileSize(other.getFileSize());
15543              }
15544              if (other.hasName()) {
15545                setName(other.getName());
15546              }
15547              if (other.hasSnapshotCopy()) {
15548                mergeSnapshotCopy(other.getSnapshotCopy());
15549              }
15550              this.mergeUnknownFields(other.getUnknownFields());
15551              return this;
15552            }
15553    
15554            public final boolean isInitialized() {
15555              if (hasSnapshotCopy()) {
15556                if (!getSnapshotCopy().isInitialized()) {
15557                  
15558                  return false;
15559                }
15560              }
15561              return true;
15562            }
15563    
15564            public Builder mergeFrom(
15565                com.google.protobuf.CodedInputStream input,
15566                com.google.protobuf.ExtensionRegistryLite extensionRegistry)
15567                throws java.io.IOException {
15568              org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.FileDiff parsedMessage = null;
15569              try {
15570                parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
15571              } catch (com.google.protobuf.InvalidProtocolBufferException e) {
15572                parsedMessage = (org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.FileDiff) e.getUnfinishedMessage();
15573                throw e;
15574              } finally {
15575                if (parsedMessage != null) {
15576                  mergeFrom(parsedMessage);
15577                }
15578              }
15579              return this;
15580            }
15581            private int bitField0_;
15582    
15583            // optional uint32 snapshotId = 1;
15584            private int snapshotId_ ;
15585            /**
15586             * <code>optional uint32 snapshotId = 1;</code>
15587             */
15588            public boolean hasSnapshotId() {
15589              return ((bitField0_ & 0x00000001) == 0x00000001);
15590            }
15591            /**
15592             * <code>optional uint32 snapshotId = 1;</code>
15593             */
15594            public int getSnapshotId() {
15595              return snapshotId_;
15596            }
15597            /**
15598             * <code>optional uint32 snapshotId = 1;</code>
15599             */
15600            public Builder setSnapshotId(int value) {
15601              bitField0_ |= 0x00000001;
15602              snapshotId_ = value;
15603              onChanged();
15604              return this;
15605            }
15606            /**
15607             * <code>optional uint32 snapshotId = 1;</code>
15608             */
15609            public Builder clearSnapshotId() {
15610              bitField0_ = (bitField0_ & ~0x00000001);
15611              snapshotId_ = 0;
15612              onChanged();
15613              return this;
15614            }
15615    
15616            // optional uint64 fileSize = 2;
15617            private long fileSize_ ;
15618            /**
15619             * <code>optional uint64 fileSize = 2;</code>
15620             */
15621            public boolean hasFileSize() {
15622              return ((bitField0_ & 0x00000002) == 0x00000002);
15623            }
15624            /**
15625             * <code>optional uint64 fileSize = 2;</code>
15626             */
15627            public long getFileSize() {
15628              return fileSize_;
15629            }
15630            /**
15631             * <code>optional uint64 fileSize = 2;</code>
15632             */
15633            public Builder setFileSize(long value) {
15634              bitField0_ |= 0x00000002;
15635              fileSize_ = value;
15636              onChanged();
15637              return this;
15638            }
15639            /**
15640             * <code>optional uint64 fileSize = 2;</code>
15641             */
15642            public Builder clearFileSize() {
15643              bitField0_ = (bitField0_ & ~0x00000002);
15644              fileSize_ = 0L;
15645              onChanged();
15646              return this;
15647            }
15648    
15649            // optional bytes name = 3;
15650            private com.google.protobuf.ByteString name_ = com.google.protobuf.ByteString.EMPTY;
15651            /**
15652             * <code>optional bytes name = 3;</code>
15653             */
15654            public boolean hasName() {
15655              return ((bitField0_ & 0x00000004) == 0x00000004);
15656            }
15657            /**
15658             * <code>optional bytes name = 3;</code>
15659             */
15660            public com.google.protobuf.ByteString getName() {
15661              return name_;
15662            }
15663            /**
15664             * <code>optional bytes name = 3;</code>
15665             */
15666            public Builder setName(com.google.protobuf.ByteString value) {
15667              if (value == null) {
15668        throw new NullPointerException();
15669      }
15670      bitField0_ |= 0x00000004;
15671              name_ = value;
15672              onChanged();
15673              return this;
15674            }
15675            /**
15676             * <code>optional bytes name = 3;</code>
15677             */
15678            public Builder clearName() {
15679              bitField0_ = (bitField0_ & ~0x00000004);
15680              name_ = getDefaultInstance().getName();
15681              onChanged();
15682              return this;
15683            }
15684    
15685            // optional .hadoop.hdfs.fsimage.INodeSection.INodeFile snapshotCopy = 4;
15686            private org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile snapshotCopy_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile.getDefaultInstance();
15687            private com.google.protobuf.SingleFieldBuilder<
15688                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFileOrBuilder> snapshotCopyBuilder_;
15689            /**
15690             * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeFile snapshotCopy = 4;</code>
15691             */
15692            public boolean hasSnapshotCopy() {
15693              return ((bitField0_ & 0x00000008) == 0x00000008);
15694            }
15695            /**
15696             * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeFile snapshotCopy = 4;</code>
15697             */
15698            public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile getSnapshotCopy() {
15699              if (snapshotCopyBuilder_ == null) {
15700                return snapshotCopy_;
15701              } else {
15702                return snapshotCopyBuilder_.getMessage();
15703              }
15704            }
15705            /**
15706             * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeFile snapshotCopy = 4;</code>
15707             */
15708            public Builder setSnapshotCopy(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile value) {
15709              if (snapshotCopyBuilder_ == null) {
15710                if (value == null) {
15711                  throw new NullPointerException();
15712                }
15713                snapshotCopy_ = value;
15714                onChanged();
15715              } else {
15716                snapshotCopyBuilder_.setMessage(value);
15717              }
15718              bitField0_ |= 0x00000008;
15719              return this;
15720            }
15721            /**
15722             * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeFile snapshotCopy = 4;</code>
15723             */
15724            public Builder setSnapshotCopy(
15725                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile.Builder builderForValue) {
15726              if (snapshotCopyBuilder_ == null) {
15727                snapshotCopy_ = builderForValue.build();
15728                onChanged();
15729              } else {
15730                snapshotCopyBuilder_.setMessage(builderForValue.build());
15731              }
15732              bitField0_ |= 0x00000008;
15733              return this;
15734            }
15735            /**
15736             * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeFile snapshotCopy = 4;</code>
15737             */
15738            public Builder mergeSnapshotCopy(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile value) {
15739              if (snapshotCopyBuilder_ == null) {
15740                if (((bitField0_ & 0x00000008) == 0x00000008) &&
15741                    snapshotCopy_ != org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile.getDefaultInstance()) {
15742                  snapshotCopy_ =
15743                    org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile.newBuilder(snapshotCopy_).mergeFrom(value).buildPartial();
15744                } else {
15745                  snapshotCopy_ = value;
15746                }
15747                onChanged();
15748              } else {
15749                snapshotCopyBuilder_.mergeFrom(value);
15750              }
15751              bitField0_ |= 0x00000008;
15752              return this;
15753            }
15754            /**
15755             * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeFile snapshotCopy = 4;</code>
15756             */
15757            public Builder clearSnapshotCopy() {
15758              if (snapshotCopyBuilder_ == null) {
15759                snapshotCopy_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile.getDefaultInstance();
15760                onChanged();
15761              } else {
15762                snapshotCopyBuilder_.clear();
15763              }
15764              bitField0_ = (bitField0_ & ~0x00000008);
15765              return this;
15766            }
15767            /**
15768             * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeFile snapshotCopy = 4;</code>
15769             */
15770            public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile.Builder getSnapshotCopyBuilder() {
15771              bitField0_ |= 0x00000008;
15772              onChanged();
15773              return getSnapshotCopyFieldBuilder().getBuilder();
15774            }
15775            /**
15776             * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeFile snapshotCopy = 4;</code>
15777             */
15778            public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFileOrBuilder getSnapshotCopyOrBuilder() {
15779              if (snapshotCopyBuilder_ != null) {
15780                return snapshotCopyBuilder_.getMessageOrBuilder();
15781              } else {
15782                return snapshotCopy_;
15783              }
15784            }
15785            /**
15786             * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeFile snapshotCopy = 4;</code>
15787             */
15788            private com.google.protobuf.SingleFieldBuilder<
15789                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFileOrBuilder> 
15790                getSnapshotCopyFieldBuilder() {
15791              if (snapshotCopyBuilder_ == null) {
15792                snapshotCopyBuilder_ = new com.google.protobuf.SingleFieldBuilder<
15793                    org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFileOrBuilder>(
15794                        snapshotCopy_,
15795                        getParentForChildren(),
15796                        isClean());
15797                snapshotCopy_ = null;
15798              }
15799              return snapshotCopyBuilder_;
15800            }
15801    
15802            // @@protoc_insertion_point(builder_scope:hadoop.hdfs.fsimage.SnapshotDiffSection.FileDiff)
15803          }
15804    
      static {
        // Eagerly builds the shared default (all-fields-unset) instance
        // returned by getDefaultInstance(); the boolean-arg constructor
        // skips stream parsing.
        defaultInstance = new FileDiff(true);
        defaultInstance.initFields();
      }
15809    
15810          // @@protoc_insertion_point(class_scope:hadoop.hdfs.fsimage.SnapshotDiffSection.FileDiff)
15811        }
15812    
    /**
     * Read-only accessor interface implemented by both
     * {@code SnapshotDiffSection.DiffEntry} and its {@code Builder}, so callers
     * can inspect field presence and values without caring which one they hold.
     */
    public interface DiffEntryOrBuilder
        extends com.google.protobuf.MessageOrBuilder {

      // required .hadoop.hdfs.fsimage.SnapshotDiffSection.DiffEntry.Type type = 1;
      /**
       * <code>required .hadoop.hdfs.fsimage.SnapshotDiffSection.DiffEntry.Type type = 1;</code>
       */
      boolean hasType();
      /**
       * <code>required .hadoop.hdfs.fsimage.SnapshotDiffSection.DiffEntry.Type type = 1;</code>
       */
      org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry.Type getType();

      // optional uint64 inodeId = 2;
      /**
       * <code>optional uint64 inodeId = 2;</code>
       */
      boolean hasInodeId();
      /**
       * <code>optional uint64 inodeId = 2;</code>
       */
      long getInodeId();

      // optional uint32 numOfDiff = 3;
      /**
       * <code>optional uint32 numOfDiff = 3;</code>
       */
      boolean hasNumOfDiff();
      /**
       * <code>optional uint32 numOfDiff = 3;</code>
       */
      int getNumOfDiff();
    }
15846        /**
15847         * Protobuf type {@code hadoop.hdfs.fsimage.SnapshotDiffSection.DiffEntry}
15848         */
15849        public static final class DiffEntry extends
15850            com.google.protobuf.GeneratedMessage
15851            implements DiffEntryOrBuilder {
15852          // Use DiffEntry.newBuilder() to construct.
15853          private DiffEntry(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
15854            super(builder);
15855            this.unknownFields = builder.getUnknownFields();
15856          }
15857          private DiffEntry(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
15858    
15859          private static final DiffEntry defaultInstance;
15860          public static DiffEntry getDefaultInstance() {
15861            return defaultInstance;
15862          }
15863    
15864          public DiffEntry getDefaultInstanceForType() {
15865            return defaultInstance;
15866          }
15867    
15868          private final com.google.protobuf.UnknownFieldSet unknownFields;
15869          @java.lang.Override
15870          public final com.google.protobuf.UnknownFieldSet
15871              getUnknownFields() {
15872            return this.unknownFields;
15873          }
15874          private DiffEntry(
15875              com.google.protobuf.CodedInputStream input,
15876              com.google.protobuf.ExtensionRegistryLite extensionRegistry)
15877              throws com.google.protobuf.InvalidProtocolBufferException {
15878            initFields();
15879            int mutable_bitField0_ = 0;
15880            com.google.protobuf.UnknownFieldSet.Builder unknownFields =
15881                com.google.protobuf.UnknownFieldSet.newBuilder();
15882            try {
15883              boolean done = false;
15884              while (!done) {
15885                int tag = input.readTag();
15886                switch (tag) {
15887                  case 0:
15888                    done = true;
15889                    break;
15890                  default: {
15891                    if (!parseUnknownField(input, unknownFields,
15892                                           extensionRegistry, tag)) {
15893                      done = true;
15894                    }
15895                    break;
15896                  }
15897                  case 8: {
15898                    int rawValue = input.readEnum();
15899                    org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry.Type value = org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry.Type.valueOf(rawValue);
15900                    if (value == null) {
15901                      unknownFields.mergeVarintField(1, rawValue);
15902                    } else {
15903                      bitField0_ |= 0x00000001;
15904                      type_ = value;
15905                    }
15906                    break;
15907                  }
15908                  case 16: {
15909                    bitField0_ |= 0x00000002;
15910                    inodeId_ = input.readUInt64();
15911                    break;
15912                  }
15913                  case 24: {
15914                    bitField0_ |= 0x00000004;
15915                    numOfDiff_ = input.readUInt32();
15916                    break;
15917                  }
15918                }
15919              }
15920            } catch (com.google.protobuf.InvalidProtocolBufferException e) {
15921              throw e.setUnfinishedMessage(this);
15922            } catch (java.io.IOException e) {
15923              throw new com.google.protobuf.InvalidProtocolBufferException(
15924                  e.getMessage()).setUnfinishedMessage(this);
15925            } finally {
15926              this.unknownFields = unknownFields.build();
15927              makeExtensionsImmutable();
15928            }
15929          }
15930          public static final com.google.protobuf.Descriptors.Descriptor
15931              getDescriptor() {
15932            return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_DiffEntry_descriptor;
15933          }
15934    
15935          protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
15936              internalGetFieldAccessorTable() {
15937            return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_DiffEntry_fieldAccessorTable
15938                .ensureFieldAccessorsInitialized(
15939                    org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry.Builder.class);
15940          }
15941    
15942          public static com.google.protobuf.Parser<DiffEntry> PARSER =
15943              new com.google.protobuf.AbstractParser<DiffEntry>() {
15944            public DiffEntry parsePartialFrom(
15945                com.google.protobuf.CodedInputStream input,
15946                com.google.protobuf.ExtensionRegistryLite extensionRegistry)
15947                throws com.google.protobuf.InvalidProtocolBufferException {
15948              return new DiffEntry(input, extensionRegistry);
15949            }
15950          };
15951    
15952          @java.lang.Override
15953          public com.google.protobuf.Parser<DiffEntry> getParserForType() {
15954            return PARSER;
15955          }
15956    
15957          /**
15958           * Protobuf enum {@code hadoop.hdfs.fsimage.SnapshotDiffSection.DiffEntry.Type}
15959           */
15960          public enum Type
15961              implements com.google.protobuf.ProtocolMessageEnum {
15962            /**
15963             * <code>FILEDIFF = 1;</code>
15964             */
15965            FILEDIFF(0, 1),
15966            /**
15967             * <code>DIRECTORYDIFF = 2;</code>
15968             */
15969            DIRECTORYDIFF(1, 2),
15970            ;
15971    
15972            /**
15973             * <code>FILEDIFF = 1;</code>
15974             */
15975            public static final int FILEDIFF_VALUE = 1;
15976            /**
15977             * <code>DIRECTORYDIFF = 2;</code>
15978             */
15979            public static final int DIRECTORYDIFF_VALUE = 2;
15980    
15981    
15982            public final int getNumber() { return value; }
15983    
15984            public static Type valueOf(int value) {
15985              switch (value) {
15986                case 1: return FILEDIFF;
15987                case 2: return DIRECTORYDIFF;
15988                default: return null;
15989              }
15990            }
15991    
15992            public static com.google.protobuf.Internal.EnumLiteMap<Type>
15993                internalGetValueMap() {
15994              return internalValueMap;
15995            }
15996            private static com.google.protobuf.Internal.EnumLiteMap<Type>
15997                internalValueMap =
15998                  new com.google.protobuf.Internal.EnumLiteMap<Type>() {
15999                    public Type findValueByNumber(int number) {
16000                      return Type.valueOf(number);
16001                    }
16002                  };
16003    
16004            public final com.google.protobuf.Descriptors.EnumValueDescriptor
16005                getValueDescriptor() {
16006              return getDescriptor().getValues().get(index);
16007            }
16008            public final com.google.protobuf.Descriptors.EnumDescriptor
16009                getDescriptorForType() {
16010              return getDescriptor();
16011            }
16012            public static final com.google.protobuf.Descriptors.EnumDescriptor
16013                getDescriptor() {
16014              return org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry.getDescriptor().getEnumTypes().get(0);
16015            }
16016    
16017            private static final Type[] VALUES = values();
16018    
16019            public static Type valueOf(
16020                com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
16021              if (desc.getType() != getDescriptor()) {
16022                throw new java.lang.IllegalArgumentException(
16023                  "EnumValueDescriptor is not for this type.");
16024              }
16025              return VALUES[desc.getIndex()];
16026            }
16027    
16028            private final int index;
16029            private final int value;
16030    
16031            private Type(int index, int value) {
16032              this.index = index;
16033              this.value = value;
16034            }
16035    
16036            // @@protoc_insertion_point(enum_scope:hadoop.hdfs.fsimage.SnapshotDiffSection.DiffEntry.Type)
16037          }
16038    
16039          private int bitField0_;
16040          // required .hadoop.hdfs.fsimage.SnapshotDiffSection.DiffEntry.Type type = 1;
16041          public static final int TYPE_FIELD_NUMBER = 1;
16042          private org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry.Type type_;
16043          /**
16044           * <code>required .hadoop.hdfs.fsimage.SnapshotDiffSection.DiffEntry.Type type = 1;</code>
16045           */
16046          public boolean hasType() {
16047            return ((bitField0_ & 0x00000001) == 0x00000001);
16048          }
16049          /**
16050           * <code>required .hadoop.hdfs.fsimage.SnapshotDiffSection.DiffEntry.Type type = 1;</code>
16051           */
16052          public org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry.Type getType() {
16053            return type_;
16054          }
16055    
16056          // optional uint64 inodeId = 2;
16057          public static final int INODEID_FIELD_NUMBER = 2;
16058          private long inodeId_;
16059          /**
16060           * <code>optional uint64 inodeId = 2;</code>
16061           */
16062          public boolean hasInodeId() {
16063            return ((bitField0_ & 0x00000002) == 0x00000002);
16064          }
16065          /**
16066           * <code>optional uint64 inodeId = 2;</code>
16067           */
16068          public long getInodeId() {
16069            return inodeId_;
16070          }
16071    
16072          // optional uint32 numOfDiff = 3;
16073          public static final int NUMOFDIFF_FIELD_NUMBER = 3;
16074          private int numOfDiff_;
16075          /**
16076           * <code>optional uint32 numOfDiff = 3;</code>
16077           */
16078          public boolean hasNumOfDiff() {
16079            return ((bitField0_ & 0x00000004) == 0x00000004);
16080          }
16081          /**
16082           * <code>optional uint32 numOfDiff = 3;</code>
16083           */
16084          public int getNumOfDiff() {
16085            return numOfDiff_;
16086          }
16087    
16088          private void initFields() {
16089            type_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry.Type.FILEDIFF;
16090            inodeId_ = 0L;
16091            numOfDiff_ = 0;
16092          }
16093          private byte memoizedIsInitialized = -1;
16094          public final boolean isInitialized() {
16095            byte isInitialized = memoizedIsInitialized;
16096            if (isInitialized != -1) return isInitialized == 1;
16097    
16098            if (!hasType()) {
16099              memoizedIsInitialized = 0;
16100              return false;
16101            }
16102            memoizedIsInitialized = 1;
16103            return true;
16104          }
16105    
16106          public void writeTo(com.google.protobuf.CodedOutputStream output)
16107                              throws java.io.IOException {
16108            getSerializedSize();
16109            if (((bitField0_ & 0x00000001) == 0x00000001)) {
16110              output.writeEnum(1, type_.getNumber());
16111            }
16112            if (((bitField0_ & 0x00000002) == 0x00000002)) {
16113              output.writeUInt64(2, inodeId_);
16114            }
16115            if (((bitField0_ & 0x00000004) == 0x00000004)) {
16116              output.writeUInt32(3, numOfDiff_);
16117            }
16118            getUnknownFields().writeTo(output);
16119          }
16120    
16121          private int memoizedSerializedSize = -1;
16122          public int getSerializedSize() {
16123            int size = memoizedSerializedSize;
16124            if (size != -1) return size;
16125    
16126            size = 0;
16127            if (((bitField0_ & 0x00000001) == 0x00000001)) {
16128              size += com.google.protobuf.CodedOutputStream
16129                .computeEnumSize(1, type_.getNumber());
16130            }
16131            if (((bitField0_ & 0x00000002) == 0x00000002)) {
16132              size += com.google.protobuf.CodedOutputStream
16133                .computeUInt64Size(2, inodeId_);
16134            }
16135            if (((bitField0_ & 0x00000004) == 0x00000004)) {
16136              size += com.google.protobuf.CodedOutputStream
16137                .computeUInt32Size(3, numOfDiff_);
16138            }
16139            size += getUnknownFields().getSerializedSize();
16140            memoizedSerializedSize = size;
16141            return size;
16142          }
16143    
16144          private static final long serialVersionUID = 0L;
16145          @java.lang.Override
16146          protected java.lang.Object writeReplace()
16147              throws java.io.ObjectStreamException {
16148            return super.writeReplace();
16149          }
16150    
16151          public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry parseFrom(
16152              com.google.protobuf.ByteString data)
16153              throws com.google.protobuf.InvalidProtocolBufferException {
16154            return PARSER.parseFrom(data);
16155          }
16156          public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry parseFrom(
16157              com.google.protobuf.ByteString data,
16158              com.google.protobuf.ExtensionRegistryLite extensionRegistry)
16159              throws com.google.protobuf.InvalidProtocolBufferException {
16160            return PARSER.parseFrom(data, extensionRegistry);
16161          }
16162          public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry parseFrom(byte[] data)
16163              throws com.google.protobuf.InvalidProtocolBufferException {
16164            return PARSER.parseFrom(data);
16165          }
16166          public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry parseFrom(
16167              byte[] data,
16168              com.google.protobuf.ExtensionRegistryLite extensionRegistry)
16169              throws com.google.protobuf.InvalidProtocolBufferException {
16170            return PARSER.parseFrom(data, extensionRegistry);
16171          }
16172          public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry parseFrom(java.io.InputStream input)
16173              throws java.io.IOException {
16174            return PARSER.parseFrom(input);
16175          }
16176          public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry parseFrom(
16177              java.io.InputStream input,
16178              com.google.protobuf.ExtensionRegistryLite extensionRegistry)
16179              throws java.io.IOException {
16180            return PARSER.parseFrom(input, extensionRegistry);
16181          }
16182          public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry parseDelimitedFrom(java.io.InputStream input)
16183              throws java.io.IOException {
16184            return PARSER.parseDelimitedFrom(input);
16185          }
16186          public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry parseDelimitedFrom(
16187              java.io.InputStream input,
16188              com.google.protobuf.ExtensionRegistryLite extensionRegistry)
16189              throws java.io.IOException {
16190            return PARSER.parseDelimitedFrom(input, extensionRegistry);
16191          }
16192          public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry parseFrom(
16193              com.google.protobuf.CodedInputStream input)
16194              throws java.io.IOException {
16195            return PARSER.parseFrom(input);
16196          }
16197          public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry parseFrom(
16198              com.google.protobuf.CodedInputStream input,
16199              com.google.protobuf.ExtensionRegistryLite extensionRegistry)
16200              throws java.io.IOException {
16201            return PARSER.parseFrom(input, extensionRegistry);
16202          }
16203    
16204          public static Builder newBuilder() { return Builder.create(); }
16205          public Builder newBuilderForType() { return newBuilder(); }
16206          public static Builder newBuilder(org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry prototype) {
16207            return newBuilder().mergeFrom(prototype);
16208          }
16209          public Builder toBuilder() { return newBuilder(this); }
16210    
16211          @java.lang.Override
16212          protected Builder newBuilderForType(
16213              com.google.protobuf.GeneratedMessage.BuilderParent parent) {
16214            Builder builder = new Builder(parent);
16215            return builder;
16216          }
16217          /**
16218           * Protobuf type {@code hadoop.hdfs.fsimage.SnapshotDiffSection.DiffEntry}
16219           */
16220          public static final class Builder extends
16221              com.google.protobuf.GeneratedMessage.Builder<Builder>
16222             implements org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntryOrBuilder {
16223            public static final com.google.protobuf.Descriptors.Descriptor
16224                getDescriptor() {
16225              return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_DiffEntry_descriptor;
16226            }
16227    
16228            protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
16229                internalGetFieldAccessorTable() {
16230              return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_DiffEntry_fieldAccessorTable
16231                  .ensureFieldAccessorsInitialized(
16232                      org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry.Builder.class);
16233            }
16234    
16235            // Construct using org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry.newBuilder()
16236            private Builder() {
16237              maybeForceBuilderInitialization();
16238            }
16239    
16240            private Builder(
16241                com.google.protobuf.GeneratedMessage.BuilderParent parent) {
16242              super(parent);
16243              maybeForceBuilderInitialization();
16244            }
16245            private void maybeForceBuilderInitialization() {
16246              if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
16247              }
16248            }
16249            private static Builder create() {
16250              return new Builder();
16251            }
16252    
16253            public Builder clear() {
16254              super.clear();
16255              type_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry.Type.FILEDIFF;
16256              bitField0_ = (bitField0_ & ~0x00000001);
16257              inodeId_ = 0L;
16258              bitField0_ = (bitField0_ & ~0x00000002);
16259              numOfDiff_ = 0;
16260              bitField0_ = (bitField0_ & ~0x00000004);
16261              return this;
16262            }
16263    
16264            public Builder clone() {
16265              return create().mergeFrom(buildPartial());
16266            }
16267    
16268            public com.google.protobuf.Descriptors.Descriptor
16269                getDescriptorForType() {
16270              return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_DiffEntry_descriptor;
16271            }
16272    
16273            public org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry getDefaultInstanceForType() {
16274              return org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry.getDefaultInstance();
16275            }
16276    
16277            public org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry build() {
16278              org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry result = buildPartial();
16279              if (!result.isInitialized()) {
16280                throw newUninitializedMessageException(result);
16281              }
16282              return result;
16283            }
16284    
16285            public org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry buildPartial() {
16286              org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry result = new org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry(this);
16287              int from_bitField0_ = bitField0_;
16288              int to_bitField0_ = 0;
16289              if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
16290                to_bitField0_ |= 0x00000001;
16291              }
16292              result.type_ = type_;
16293              if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
16294                to_bitField0_ |= 0x00000002;
16295              }
16296              result.inodeId_ = inodeId_;
16297              if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
16298                to_bitField0_ |= 0x00000004;
16299              }
16300              result.numOfDiff_ = numOfDiff_;
16301              result.bitField0_ = to_bitField0_;
16302              onBuilt();
16303              return result;
16304            }
16305    
16306            public Builder mergeFrom(com.google.protobuf.Message other) {
16307              if (other instanceof org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry) {
16308                return mergeFrom((org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry)other);
16309              } else {
16310                super.mergeFrom(other);
16311                return this;
16312              }
16313            }
16314    
16315            public Builder mergeFrom(org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry other) {
16316              if (other == org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry.getDefaultInstance()) return this;
16317              if (other.hasType()) {
16318                setType(other.getType());
16319              }
16320              if (other.hasInodeId()) {
16321                setInodeId(other.getInodeId());
16322              }
16323              if (other.hasNumOfDiff()) {
16324                setNumOfDiff(other.getNumOfDiff());
16325              }
16326              this.mergeUnknownFields(other.getUnknownFields());
16327              return this;
16328            }
16329    
16330            public final boolean isInitialized() {
16331              if (!hasType()) {
16332                
16333                return false;
16334              }
16335              return true;
16336            }
16337    
16338            public Builder mergeFrom(
16339                com.google.protobuf.CodedInputStream input,
16340                com.google.protobuf.ExtensionRegistryLite extensionRegistry)
16341                throws java.io.IOException {
16342              org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry parsedMessage = null;
16343              try {
16344                parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
16345              } catch (com.google.protobuf.InvalidProtocolBufferException e) {
16346                parsedMessage = (org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry) e.getUnfinishedMessage();
16347                throw e;
16348              } finally {
16349                if (parsedMessage != null) {
16350                  mergeFrom(parsedMessage);
16351                }
16352              }
16353              return this;
16354            }
16355            private int bitField0_;
16356    
16357            // required .hadoop.hdfs.fsimage.SnapshotDiffSection.DiffEntry.Type type = 1;
16358            private org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry.Type type_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry.Type.FILEDIFF;
16359            /**
16360             * <code>required .hadoop.hdfs.fsimage.SnapshotDiffSection.DiffEntry.Type type = 1;</code>
16361             */
16362            public boolean hasType() {
16363              return ((bitField0_ & 0x00000001) == 0x00000001);
16364            }
16365            /**
16366             * <code>required .hadoop.hdfs.fsimage.SnapshotDiffSection.DiffEntry.Type type = 1;</code>
16367             */
16368            public org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry.Type getType() {
16369              return type_;
16370            }
16371            /**
16372             * <code>required .hadoop.hdfs.fsimage.SnapshotDiffSection.DiffEntry.Type type = 1;</code>
16373             */
16374            public Builder setType(org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry.Type value) {
16375              if (value == null) {
16376                throw new NullPointerException();
16377              }
16378              bitField0_ |= 0x00000001;
16379              type_ = value;
16380              onChanged();
16381              return this;
16382            }
16383            /**
16384             * <code>required .hadoop.hdfs.fsimage.SnapshotDiffSection.DiffEntry.Type type = 1;</code>
16385             */
16386            public Builder clearType() {
16387              bitField0_ = (bitField0_ & ~0x00000001);
16388              type_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry.Type.FILEDIFF;
16389              onChanged();
16390              return this;
16391            }
16392    
16393            // optional uint64 inodeId = 2;
16394            private long inodeId_ ;
16395            /**
16396             * <code>optional uint64 inodeId = 2;</code>
16397             */
16398            public boolean hasInodeId() {
16399              return ((bitField0_ & 0x00000002) == 0x00000002);
16400            }
16401            /**
16402             * <code>optional uint64 inodeId = 2;</code>
16403             */
16404            public long getInodeId() {
16405              return inodeId_;
16406            }
16407            /**
16408             * <code>optional uint64 inodeId = 2;</code>
16409             */
16410            public Builder setInodeId(long value) {
16411              bitField0_ |= 0x00000002;
16412              inodeId_ = value;
16413              onChanged();
16414              return this;
16415            }
16416            /**
16417             * <code>optional uint64 inodeId = 2;</code>
16418             */
16419            public Builder clearInodeId() {
16420              bitField0_ = (bitField0_ & ~0x00000002);
16421              inodeId_ = 0L;
16422              onChanged();
16423              return this;
16424            }
16425    
16426            // optional uint32 numOfDiff = 3;
16427            private int numOfDiff_ ;
16428            /**
16429             * <code>optional uint32 numOfDiff = 3;</code>
16430             */
16431            public boolean hasNumOfDiff() {
16432              return ((bitField0_ & 0x00000004) == 0x00000004);
16433            }
16434            /**
16435             * <code>optional uint32 numOfDiff = 3;</code>
16436             */
16437            public int getNumOfDiff() {
16438              return numOfDiff_;
16439            }
16440            /**
16441             * <code>optional uint32 numOfDiff = 3;</code>
16442             */
16443            public Builder setNumOfDiff(int value) {
16444              bitField0_ |= 0x00000004;
16445              numOfDiff_ = value;
16446              onChanged();
16447              return this;
16448            }
16449            /**
16450             * <code>optional uint32 numOfDiff = 3;</code>
16451             */
16452            public Builder clearNumOfDiff() {
16453              bitField0_ = (bitField0_ & ~0x00000004);
16454              numOfDiff_ = 0;
16455              onChanged();
16456              return this;
16457            }
16458    
16459            // @@protoc_insertion_point(builder_scope:hadoop.hdfs.fsimage.SnapshotDiffSection.DiffEntry)
16460          }
16461    
16462          static {
16463            defaultInstance = new DiffEntry(true);
16464            defaultInstance.initFields();
16465          }
16466    
16467          // @@protoc_insertion_point(class_scope:hadoop.hdfs.fsimage.SnapshotDiffSection.DiffEntry)
16468        }
16469    
    // SnapshotDiffSection declares no singular fields here, so there is nothing to reset.
    private void initFields() {
    }
    // Memoized isInitialized() result: -1 = not computed, 0 = false, 1 = true.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      // No required fields at this level, so the message is always initialized.
      memoizedIsInitialized = 1;
      return true;
    }
16480    
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();  // forces the size-memoization pass before writing
      getUnknownFields().writeTo(output);
    }
16486    
16487        private int memoizedSerializedSize = -1;
16488        public int getSerializedSize() {
16489          int size = memoizedSerializedSize;
16490          if (size != -1) return size;
16491    
16492          size = 0;
16493          size += getUnknownFields().getSerializedSize();
16494          memoizedSerializedSize = size;
16495          return size;
16496        }
16497    
16498        private static final long serialVersionUID = 0L;
16499        @java.lang.Override
16500        protected java.lang.Object writeReplace()
16501            throws java.io.ObjectStreamException {
16502          return super.writeReplace();
16503        }
16504    
    // Static parse entry points for every supported input form; all delegate
    // to the PARSER singleton.
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    // Delimited variants read a varint length prefix before the message body.
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
16557    
    // Builder factory methods.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    // Returns a builder pre-populated from an existing message.
    public static Builder newBuilder(org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    // Called by the runtime when this message is built as a sub-message of a
    // parent builder.
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
16571        /**
16572         * Protobuf type {@code hadoop.hdfs.fsimage.SnapshotDiffSection}
16573         *
16574         * <pre>
16575         **
16576         * This section records information about snapshot diffs
16577         * NAME: SNAPSHOT_DIFF
16578         * </pre>
16579         */
16580        public static final class Builder extends
16581            com.google.protobuf.GeneratedMessage.Builder<Builder>
16582           implements org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSectionOrBuilder {
16583          public static final com.google.protobuf.Descriptors.Descriptor
16584              getDescriptor() {
16585            return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_descriptor;
16586          }
16587    
16588          protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
16589              internalGetFieldAccessorTable() {
16590            return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_fieldAccessorTable
16591                .ensureFieldAccessorsInitialized(
16592                    org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.Builder.class);
16593          }
16594    
16595          // Construct using org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.newBuilder()
16596          private Builder() {
16597            maybeForceBuilderInitialization();
16598          }
16599    
16600          private Builder(
16601              com.google.protobuf.GeneratedMessage.BuilderParent parent) {
16602            super(parent);
16603            maybeForceBuilderInitialization();
16604          }
16605          private void maybeForceBuilderInitialization() {
16606            if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
16607            }
16608          }
16609          private static Builder create() {
16610            return new Builder();
16611          }
16612    
16613          public Builder clear() {
16614            super.clear();
16615            return this;
16616          }
16617    
16618          public Builder clone() {
16619            return create().mergeFrom(buildPartial());
16620          }
16621    
16622          public com.google.protobuf.Descriptors.Descriptor
16623              getDescriptorForType() {
16624            return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_descriptor;
16625          }
16626    
16627          public org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection getDefaultInstanceForType() {
16628            return org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.getDefaultInstance();
16629          }
16630    
16631          public org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection build() {
16632            org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection result = buildPartial();
16633            if (!result.isInitialized()) {
16634              throw newUninitializedMessageException(result);
16635            }
16636            return result;
16637          }
16638    
16639          public org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection buildPartial() {
16640            org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection result = new org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection(this);
16641            onBuilt();
16642            return result;
16643          }
16644    
16645          public Builder mergeFrom(com.google.protobuf.Message other) {
16646            if (other instanceof org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection) {
16647              return mergeFrom((org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection)other);
16648            } else {
16649              super.mergeFrom(other);
16650              return this;
16651            }
16652          }
16653    
16654          public Builder mergeFrom(org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection other) {
16655            if (other == org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.getDefaultInstance()) return this;
16656            this.mergeUnknownFields(other.getUnknownFields());
16657            return this;
16658          }
16659    
16660          public final boolean isInitialized() {
16661            return true;
16662          }
16663    
16664          public Builder mergeFrom(
16665              com.google.protobuf.CodedInputStream input,
16666              com.google.protobuf.ExtensionRegistryLite extensionRegistry)
16667              throws java.io.IOException {
16668            org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection parsedMessage = null;
16669            try {
16670              parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
16671            } catch (com.google.protobuf.InvalidProtocolBufferException e) {
16672              parsedMessage = (org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection) e.getUnfinishedMessage();
16673              throw e;
16674            } finally {
16675              if (parsedMessage != null) {
16676                mergeFrom(parsedMessage);
16677              }
16678            }
16679            return this;
16680          }
16681    
16682          // @@protoc_insertion_point(builder_scope:hadoop.hdfs.fsimage.SnapshotDiffSection)
16683        }
16684    
    // Eagerly build the shared default instance for SnapshotDiffSection.
    static {
      defaultInstance = new SnapshotDiffSection(true);
      defaultInstance.initFields();
    }
16689    
16690        // @@protoc_insertion_point(class_scope:hadoop.hdfs.fsimage.SnapshotDiffSection)
16691      }
16692    
  /**
   * Read-only accessor interface for {@code hadoop.hdfs.fsimage.StringTableSection},
   * implemented by both the message and its Builder.
   */
  public interface StringTableSectionOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // optional uint32 numEntry = 1;
    /**
     * <code>optional uint32 numEntry = 1;</code>
     *
     * <pre>
     * repeated Entry
     * </pre>
     */
    boolean hasNumEntry();
    /**
     * <code>optional uint32 numEntry = 1;</code>
     *
     * <pre>
     * repeated Entry
     * </pre>
     */
    int getNumEntry();
  }
16714      /**
16715       * Protobuf type {@code hadoop.hdfs.fsimage.StringTableSection}
16716       *
16717       * <pre>
16718       **
16719       * This section maps string to id
16720       * NAME: STRING_TABLE
16721       * </pre>
16722       */
16723      public static final class StringTableSection extends
16724          com.google.protobuf.GeneratedMessage
16725          implements StringTableSectionOrBuilder {
    // Use StringTableSection.newBuilder() to construct.
    private StringTableSection(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Constructor for the shared default instance; carries no unknown fields.
    private StringTableSection(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final StringTableSection defaultInstance;
    public static StringTableSection getDefaultInstance() {
      return defaultInstance;
    }

    public StringTableSection getDefaultInstanceForType() {
      return defaultInstance;
    }

    // Fields that arrived on the wire but are not part of this message's
    // schema; preserved for round-tripping.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Parses a StringTableSection from the wire. Note: the default branch
    // appearing before case 8 is harmless — Java matches switch cases by
    // value, not by position.
    private StringTableSection(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              // Tag 0 signals end of the input stream.
              done = true;
              break;
            default: {
              // Unrecognized field: stash it in unknownFields, or stop if it
              // is an end-group tag.
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 8: {
              // Field 1, wire type 0 (varint): numEntry.
              bitField0_ |= 0x00000001;
              numEntry_ = input.readUInt32();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Always capture whatever unknown fields were read before any error.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_StringTableSection_descriptor;
    }

    // Maps descriptor fields to generated accessors for reflective access.
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_StringTableSection_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.Builder.class);
    }

    // Stateless parser singleton; delegates to the parsing constructor.
    public static com.google.protobuf.Parser<StringTableSection> PARSER =
        new com.google.protobuf.AbstractParser<StringTableSection>() {
      public StringTableSection parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new StringTableSection(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<StringTableSection> getParserForType() {
      return PARSER;
    }
16814    
    /**
     * Read-only accessor interface for
     * {@code hadoop.hdfs.fsimage.StringTableSection.Entry}, implemented by
     * both the message and its Builder.
     */
    public interface EntryOrBuilder
        extends com.google.protobuf.MessageOrBuilder {

      // optional uint32 id = 1;
      /**
       * <code>optional uint32 id = 1;</code>
       */
      boolean hasId();
      /**
       * <code>optional uint32 id = 1;</code>
       */
      int getId();

      // optional string str = 2;
      /**
       * <code>optional string str = 2;</code>
       */
      boolean hasStr();
      /**
       * <code>optional string str = 2;</code>
       */
      java.lang.String getStr();
      /**
       * <code>optional string str = 2;</code>
       *
       * Raw UTF-8 bytes of the same field.
       */
      com.google.protobuf.ByteString
          getStrBytes();
    }
16843        /**
16844         * Protobuf type {@code hadoop.hdfs.fsimage.StringTableSection.Entry}
16845         */
16846        public static final class Entry extends
16847            com.google.protobuf.GeneratedMessage
16848            implements EntryOrBuilder {
      // Use Entry.newBuilder() to construct.
      private Entry(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
        super(builder);
        this.unknownFields = builder.getUnknownFields();
      }
      // Constructor for the shared default instance; carries no unknown fields.
      private Entry(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

      private static final Entry defaultInstance;
      public static Entry getDefaultInstance() {
        return defaultInstance;
      }

      public Entry getDefaultInstanceForType() {
        return defaultInstance;
      }

      // Unrecognized wire fields, preserved for round-tripping.
      private final com.google.protobuf.UnknownFieldSet unknownFields;
      @java.lang.Override
      public final com.google.protobuf.UnknownFieldSet
          getUnknownFields() {
        return this.unknownFields;
      }
      // Parses an Entry from the wire. The default branch preceding the
      // numbered cases is harmless — Java matches switch cases by value.
      private Entry(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        initFields();
        int mutable_bitField0_ = 0;
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
            com.google.protobuf.UnknownFieldSet.newBuilder();
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                // Tag 0 signals end of the input stream.
                done = true;
                break;
              default: {
                if (!parseUnknownField(input, unknownFields,
                                       extensionRegistry, tag)) {
                  done = true;
                }
                break;
              }
              case 8: {
                // Field 1, wire type 0 (varint): id.
                bitField0_ |= 0x00000001;
                id_ = input.readUInt32();
                break;
              }
              case 18: {
                // Field 2, wire type 2 (length-delimited): str, kept as raw
                // bytes and lazily decoded to String on first access.
                bitField0_ |= 0x00000002;
                str_ = input.readBytes();
                break;
              }
            }
          }
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(this);
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(
              e.getMessage()).setUnfinishedMessage(this);
        } finally {
          // Always capture whatever unknown fields were read before any error.
          this.unknownFields = unknownFields.build();
          makeExtensionsImmutable();
        }
      }
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_StringTableSection_Entry_descriptor;
      }

      // Maps descriptor fields to generated accessors for reflective access.
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_StringTableSection_Entry_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.Entry.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.Entry.Builder.class);
      }

      // Stateless parser singleton; delegates to the parsing constructor.
      public static com.google.protobuf.Parser<Entry> PARSER =
          new com.google.protobuf.AbstractParser<Entry>() {
        public Entry parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          return new Entry(input, extensionRegistry);
        }
      };

      @java.lang.Override
      public com.google.protobuf.Parser<Entry> getParserForType() {
        return PARSER;
      }
16942    
      // Presence bits: 0x1 = id, 0x2 = str.
      private int bitField0_;
      // optional uint32 id = 1;
      public static final int ID_FIELD_NUMBER = 1;
      private int id_;
      /**
       * <code>optional uint32 id = 1;</code>
       */
      public boolean hasId() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>optional uint32 id = 1;</code>
       */
      public int getId() {
        return id_;
      }

      // optional string str = 2;
      public static final int STR_FIELD_NUMBER = 2;
      // Holds either a String or a ByteString; converted lazily in the
      // accessors below and cached when possible.
      private java.lang.Object str_;
      /**
       * <code>optional string str = 2;</code>
       */
      public boolean hasStr() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>optional string str = 2;</code>
       *
       * Decodes the cached ByteString as UTF-8 on first call; the decoded
       * String replaces the bytes only when they are valid UTF-8, so repeat
       * calls on valid data skip the decode.
       */
      public java.lang.String getStr() {
        java.lang.Object ref = str_;
        if (ref instanceof java.lang.String) {
          return (java.lang.String) ref;
        } else {
          com.google.protobuf.ByteString bs = 
              (com.google.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            str_ = s;
          }
          return s;
        }
      }
      /**
       * <code>optional string str = 2;</code>
       *
       * Returns the UTF-8 bytes, encoding and caching them if the field is
       * currently held as a String.
       */
      public com.google.protobuf.ByteString
          getStrBytes() {
        java.lang.Object ref = str_;
        if (ref instanceof java.lang.String) {
          com.google.protobuf.ByteString b = 
              com.google.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          str_ = b;
          return b;
        } else {
          return (com.google.protobuf.ByteString) ref;
        }
      }
17002    
      // Resets both fields to their proto defaults.
      private void initFields() {
        id_ = 0;
        str_ = "";
      }
      // Memoized isInitialized result: -1 = not computed, 0 = false, 1 = true.
      private byte memoizedIsInitialized = -1;
      public final boolean isInitialized() {
        byte isInitialized = memoizedIsInitialized;
        if (isInitialized != -1) return isInitialized == 1;

        // No required fields, so this message is always initialized.
        memoizedIsInitialized = 1;
        return true;
      }

      // Writes only the fields whose presence bits are set, then any unknown
      // fields.
      public void writeTo(com.google.protobuf.CodedOutputStream output)
                          throws java.io.IOException {
        getSerializedSize();
        if (((bitField0_ & 0x00000001) == 0x00000001)) {
          output.writeUInt32(1, id_);
        }
        if (((bitField0_ & 0x00000002) == 0x00000002)) {
          output.writeBytes(2, getStrBytes());
        }
        getUnknownFields().writeTo(output);
      }

      // Memoized serialized byte size; -1 until first computed.
      private int memoizedSerializedSize = -1;
      public int getSerializedSize() {
        int size = memoizedSerializedSize;
        if (size != -1) return size;

        size = 0;
        if (((bitField0_ & 0x00000001) == 0x00000001)) {
          size += com.google.protobuf.CodedOutputStream
            .computeUInt32Size(1, id_);
        }
        if (((bitField0_ & 0x00000002) == 0x00000002)) {
          size += com.google.protobuf.CodedOutputStream
            .computeBytesSize(2, getStrBytes());
        }
        size += getUnknownFields().getSerializedSize();
        memoizedSerializedSize = size;
        return size;
      }

      private static final long serialVersionUID = 0L;
      // Java serialization is delegated to the protobuf superclass.
      @java.lang.Override
      protected java.lang.Object writeReplace()
          throws java.io.ObjectStreamException {
        return super.writeReplace();
      }
17053    
      // Static parse entry points for every supported input form; all
      // delegate to the PARSER singleton.
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.Entry parseFrom(
          com.google.protobuf.ByteString data)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.Entry parseFrom(
          com.google.protobuf.ByteString data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.Entry parseFrom(byte[] data)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.Entry parseFrom(
          byte[] data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.Entry parseFrom(java.io.InputStream input)
          throws java.io.IOException {
        return PARSER.parseFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.Entry parseFrom(
          java.io.InputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseFrom(input, extensionRegistry);
      }
      // Delimited variants read a varint length prefix before the body.
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.Entry parseDelimitedFrom(java.io.InputStream input)
          throws java.io.IOException {
        return PARSER.parseDelimitedFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.Entry parseDelimitedFrom(
          java.io.InputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseDelimitedFrom(input, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.Entry parseFrom(
          com.google.protobuf.CodedInputStream input)
          throws java.io.IOException {
        return PARSER.parseFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.Entry parseFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseFrom(input, extensionRegistry);
      }
17106    
      // Builder factory methods.
      public static Builder newBuilder() { return Builder.create(); }
      public Builder newBuilderForType() { return newBuilder(); }
      // Returns a builder pre-populated from an existing message.
      public static Builder newBuilder(org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.Entry prototype) {
        return newBuilder().mergeFrom(prototype);
      }
      public Builder toBuilder() { return newBuilder(this); }

      // Called by the runtime when this message is built as a sub-message of
      // a parent builder.
      @java.lang.Override
      protected Builder newBuilderForType(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        Builder builder = new Builder(parent);
        return builder;
      }
17120          /**
17121           * Protobuf type {@code hadoop.hdfs.fsimage.StringTableSection.Entry}
17122           */
17123          public static final class Builder extends
17124              com.google.protobuf.GeneratedMessage.Builder<Builder>
17125             implements org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.EntryOrBuilder {
          public static final com.google.protobuf.Descriptors.Descriptor
              getDescriptor() {
            return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_StringTableSection_Entry_descriptor;
          }

          // Maps descriptor fields to generated accessors for reflection.
          protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
              internalGetFieldAccessorTable() {
            return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_StringTableSection_Entry_fieldAccessorTable
                .ensureFieldAccessorsInitialized(
                    org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.Entry.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.Entry.Builder.class);
          }

          // Construct using org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.Entry.newBuilder()
          private Builder() {
            maybeForceBuilderInitialization();
          }

          private Builder(
              com.google.protobuf.GeneratedMessage.BuilderParent parent) {
            super(parent);
            maybeForceBuilderInitialization();
          }
          // No nested-message fields, so nothing to force-initialize.
          private void maybeForceBuilderInitialization() {
            if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
            }
          }
          private static Builder create() {
            return new Builder();
          }

          // Resets both fields and their presence bits to defaults.
          public Builder clear() {
            super.clear();
            id_ = 0;
            bitField0_ = (bitField0_ & ~0x00000001);
            str_ = "";
            bitField0_ = (bitField0_ & ~0x00000002);
            return this;
          }

          public Builder clone() {
            return create().mergeFrom(buildPartial());
          }

          public com.google.protobuf.Descriptors.Descriptor
              getDescriptorForType() {
            return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_StringTableSection_Entry_descriptor;
          }

          public org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.Entry getDefaultInstanceForType() {
            return org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.Entry.getDefaultInstance();
          }

          // build() enforces required-field initialization; with no required
          // fields it never throws in practice.
          public org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.Entry build() {
            org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.Entry result = buildPartial();
            if (!result.isInitialized()) {
              throw newUninitializedMessageException(result);
            }
            return result;
          }
17185    
17186            public org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.Entry buildPartial() {
17187              org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.Entry result = new org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.Entry(this);
17188              int from_bitField0_ = bitField0_;
17189              int to_bitField0_ = 0;
17190              if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
17191                to_bitField0_ |= 0x00000001;
17192              }
17193              result.id_ = id_;
17194              if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
17195                to_bitField0_ |= 0x00000002;
17196              }
17197              result.str_ = str_;
17198              result.bitField0_ = to_bitField0_;
17199              onBuilt();
17200              return result;
17201            }
17202    
17203            public Builder mergeFrom(com.google.protobuf.Message other) {
17204              if (other instanceof org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.Entry) {
17205                return mergeFrom((org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.Entry)other);
17206              } else {
17207                super.mergeFrom(other);
17208                return this;
17209              }
17210            }
17211    
17212            public Builder mergeFrom(org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.Entry other) {
17213              if (other == org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.Entry.getDefaultInstance()) return this;
17214              if (other.hasId()) {
17215                setId(other.getId());
17216              }
17217              if (other.hasStr()) {
17218                bitField0_ |= 0x00000002;
17219                str_ = other.str_;
17220                onChanged();
17221              }
17222              this.mergeUnknownFields(other.getUnknownFields());
17223              return this;
17224            }
17225    
17226            public final boolean isInitialized() {
17227              return true;
17228            }
17229    
17230            public Builder mergeFrom(
17231                com.google.protobuf.CodedInputStream input,
17232                com.google.protobuf.ExtensionRegistryLite extensionRegistry)
17233                throws java.io.IOException {
17234              org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.Entry parsedMessage = null;
17235              try {
17236                parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
17237              } catch (com.google.protobuf.InvalidProtocolBufferException e) {
17238                parsedMessage = (org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.Entry) e.getUnfinishedMessage();
17239                throw e;
17240              } finally {
17241                if (parsedMessage != null) {
17242                  mergeFrom(parsedMessage);
17243                }
17244              }
17245              return this;
17246            }
17247            private int bitField0_;
17248    
17249            // optional uint32 id = 1;
17250            private int id_ ;
17251            /**
17252             * <code>optional uint32 id = 1;</code>
17253             */
17254            public boolean hasId() {
17255              return ((bitField0_ & 0x00000001) == 0x00000001);
17256            }
17257            /**
17258             * <code>optional uint32 id = 1;</code>
17259             */
17260            public int getId() {
17261              return id_;
17262            }
17263            /**
17264             * <code>optional uint32 id = 1;</code>
17265             */
17266            public Builder setId(int value) {
17267              bitField0_ |= 0x00000001;
17268              id_ = value;
17269              onChanged();
17270              return this;
17271            }
17272            /**
17273             * <code>optional uint32 id = 1;</code>
17274             */
17275            public Builder clearId() {
17276              bitField0_ = (bitField0_ & ~0x00000001);
17277              id_ = 0;
17278              onChanged();
17279              return this;
17280            }
17281    
17282            // optional string str = 2;
17283            private java.lang.Object str_ = "";
17284            /**
17285             * <code>optional string str = 2;</code>
17286             */
17287            public boolean hasStr() {
17288              return ((bitField0_ & 0x00000002) == 0x00000002);
17289            }
17290            /**
17291             * <code>optional string str = 2;</code>
17292             */
17293            public java.lang.String getStr() {
17294              java.lang.Object ref = str_;
17295              if (!(ref instanceof java.lang.String)) {
17296                java.lang.String s = ((com.google.protobuf.ByteString) ref)
17297                    .toStringUtf8();
17298                str_ = s;
17299                return s;
17300              } else {
17301                return (java.lang.String) ref;
17302              }
17303            }
17304            /**
17305             * <code>optional string str = 2;</code>
17306             */
17307            public com.google.protobuf.ByteString
17308                getStrBytes() {
17309              java.lang.Object ref = str_;
17310              if (ref instanceof String) {
17311                com.google.protobuf.ByteString b = 
17312                    com.google.protobuf.ByteString.copyFromUtf8(
17313                        (java.lang.String) ref);
17314                str_ = b;
17315                return b;
17316              } else {
17317                return (com.google.protobuf.ByteString) ref;
17318              }
17319            }
17320            /**
17321             * <code>optional string str = 2;</code>
17322             */
17323            public Builder setStr(
17324                java.lang.String value) {
17325              if (value == null) {
17326        throw new NullPointerException();
17327      }
17328      bitField0_ |= 0x00000002;
17329              str_ = value;
17330              onChanged();
17331              return this;
17332            }
17333            /**
17334             * <code>optional string str = 2;</code>
17335             */
17336            public Builder clearStr() {
17337              bitField0_ = (bitField0_ & ~0x00000002);
17338              str_ = getDefaultInstance().getStr();
17339              onChanged();
17340              return this;
17341            }
17342            /**
17343             * <code>optional string str = 2;</code>
17344             */
17345            public Builder setStrBytes(
17346                com.google.protobuf.ByteString value) {
17347              if (value == null) {
17348        throw new NullPointerException();
17349      }
17350      bitField0_ |= 0x00000002;
17351              str_ = value;
17352              onChanged();
17353              return this;
17354            }
17355    
17356            // @@protoc_insertion_point(builder_scope:hadoop.hdfs.fsimage.StringTableSection.Entry)
17357          }
17358    
      // Eagerly create and initialize the shared default (empty) Entry
      // returned by getDefaultInstance().
      static {
        defaultInstance = new Entry(true);
        defaultInstance.initFields();
      }
17363    
17364          // @@protoc_insertion_point(class_scope:hadoop.hdfs.fsimage.StringTableSection.Entry)
17365        }
17366    
    // Presence bits for optional fields: 0x00000001 = numEntry.
    private int bitField0_;
    // optional uint32 numEntry = 1;
    public static final int NUMENTRY_FIELD_NUMBER = 1;
    private int numEntry_;
    /**
     * <code>optional uint32 numEntry = 1;</code>
     *
     * <pre>
     * repeated Entry
     * </pre>
     */
    public boolean hasNumEntry() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>optional uint32 numEntry = 1;</code>
     *
     * <pre>
     * repeated Entry
     * </pre>
     */
    public int getNumEntry() {
      return numEntry_;
    }
17391    
    // Resets all fields to their proto defaults (used when constructing
    // instances, e.g. the default instance in the static initializer).
    private void initFields() {
      numEntry_ = 0;
    }
    // Memoized tri-state: -1 = not computed, 0 = false, 1 = true.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      // numEntry is optional, so the message is always initialized.
      memoizedIsInitialized = 1;
      return true;
    }
17403    
    // Serializes the set fields in field-number order, then any unknown
    // fields preserved from parsing.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();  // ensure the size is memoized before writing
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeUInt32(1, numEntry_);
      }
      getUnknownFields().writeTo(output);
    }
17412    
    // Cached wire size; -1 means not yet computed.
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt32Size(1, numEntry_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
17427    
    private static final long serialVersionUID = 0L;
    // Java-serialization hook; delegates to the GeneratedMessage
    // serialization proxy.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
17434    
    // Static parsing entry points for the supported input kinds
    // (ByteString, byte[], InputStream, CodedInputStream, with and without
    // an extension registry, plus length-delimited variants). All delegate
    // to PARSER.
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
17487    
    // Builder factory methods: a fresh builder, a builder pre-populated
    // from a prototype message, and the instance-level hooks used by the
    // protobuf runtime.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
17501        /**
17502         * Protobuf type {@code hadoop.hdfs.fsimage.StringTableSection}
17503         *
17504         * <pre>
17505         **
17506         * This section maps string to id
17507         * NAME: STRING_TABLE
17508         * </pre>
17509         */
17510        public static final class Builder extends
17511            com.google.protobuf.GeneratedMessage.Builder<Builder>
17512           implements org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSectionOrBuilder {
17513          public static final com.google.protobuf.Descriptors.Descriptor
17514              getDescriptor() {
17515            return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_StringTableSection_descriptor;
17516          }
17517    
17518          protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
17519              internalGetFieldAccessorTable() {
17520            return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_StringTableSection_fieldAccessorTable
17521                .ensureFieldAccessorsInitialized(
17522                    org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.Builder.class);
17523          }
17524    
17525          // Construct using org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.newBuilder()
17526          private Builder() {
17527            maybeForceBuilderInitialization();
17528          }
17529    
17530          private Builder(
17531              com.google.protobuf.GeneratedMessage.BuilderParent parent) {
17532            super(parent);
17533            maybeForceBuilderInitialization();
17534          }
17535          private void maybeForceBuilderInitialization() {
17536            if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
17537            }
17538          }
17539          private static Builder create() {
17540            return new Builder();
17541          }
17542    
17543          public Builder clear() {
17544            super.clear();
17545            numEntry_ = 0;
17546            bitField0_ = (bitField0_ & ~0x00000001);
17547            return this;
17548          }
17549    
17550          public Builder clone() {
17551            return create().mergeFrom(buildPartial());
17552          }
17553    
17554          public com.google.protobuf.Descriptors.Descriptor
17555              getDescriptorForType() {
17556            return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_StringTableSection_descriptor;
17557          }
17558    
17559          public org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection getDefaultInstanceForType() {
17560            return org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.getDefaultInstance();
17561          }
17562    
17563          public org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection build() {
17564            org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection result = buildPartial();
17565            if (!result.isInitialized()) {
17566              throw newUninitializedMessageException(result);
17567            }
17568            return result;
17569          }
17570    
17571          public org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection buildPartial() {
17572            org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection result = new org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection(this);
17573            int from_bitField0_ = bitField0_;
17574            int to_bitField0_ = 0;
17575            if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
17576              to_bitField0_ |= 0x00000001;
17577            }
17578            result.numEntry_ = numEntry_;
17579            result.bitField0_ = to_bitField0_;
17580            onBuilt();
17581            return result;
17582          }
17583    
17584          public Builder mergeFrom(com.google.protobuf.Message other) {
17585            if (other instanceof org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection) {
17586              return mergeFrom((org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection)other);
17587            } else {
17588              super.mergeFrom(other);
17589              return this;
17590            }
17591          }
17592    
17593          public Builder mergeFrom(org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection other) {
17594            if (other == org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.getDefaultInstance()) return this;
17595            if (other.hasNumEntry()) {
17596              setNumEntry(other.getNumEntry());
17597            }
17598            this.mergeUnknownFields(other.getUnknownFields());
17599            return this;
17600          }
17601    
17602          public final boolean isInitialized() {
17603            return true;
17604          }
17605    
17606          public Builder mergeFrom(
17607              com.google.protobuf.CodedInputStream input,
17608              com.google.protobuf.ExtensionRegistryLite extensionRegistry)
17609              throws java.io.IOException {
17610            org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection parsedMessage = null;
17611            try {
17612              parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
17613            } catch (com.google.protobuf.InvalidProtocolBufferException e) {
17614              parsedMessage = (org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection) e.getUnfinishedMessage();
17615              throw e;
17616            } finally {
17617              if (parsedMessage != null) {
17618                mergeFrom(parsedMessage);
17619              }
17620            }
17621            return this;
17622          }
17623          private int bitField0_;
17624    
17625          // optional uint32 numEntry = 1;
17626          private int numEntry_ ;
17627          /**
17628           * <code>optional uint32 numEntry = 1;</code>
17629           *
17630           * <pre>
17631           * repeated Entry
17632           * </pre>
17633           */
17634          public boolean hasNumEntry() {
17635            return ((bitField0_ & 0x00000001) == 0x00000001);
17636          }
17637          /**
17638           * <code>optional uint32 numEntry = 1;</code>
17639           *
17640           * <pre>
17641           * repeated Entry
17642           * </pre>
17643           */
17644          public int getNumEntry() {
17645            return numEntry_;
17646          }
17647          /**
17648           * <code>optional uint32 numEntry = 1;</code>
17649           *
17650           * <pre>
17651           * repeated Entry
17652           * </pre>
17653           */
17654          public Builder setNumEntry(int value) {
17655            bitField0_ |= 0x00000001;
17656            numEntry_ = value;
17657            onChanged();
17658            return this;
17659          }
17660          /**
17661           * <code>optional uint32 numEntry = 1;</code>
17662           *
17663           * <pre>
17664           * repeated Entry
17665           * </pre>
17666           */
17667          public Builder clearNumEntry() {
17668            bitField0_ = (bitField0_ & ~0x00000001);
17669            numEntry_ = 0;
17670            onChanged();
17671            return this;
17672          }
17673    
17674          // @@protoc_insertion_point(builder_scope:hadoop.hdfs.fsimage.StringTableSection)
17675        }
17676    
    // Eagerly create and initialize the shared default (empty)
    // StringTableSection returned by getDefaultInstance().
    static {
      defaultInstance = new StringTableSection(true);
      defaultInstance.initFields();
    }
17681    
17682        // @@protoc_insertion_point(class_scope:hadoop.hdfs.fsimage.StringTableSection)
17683      }
17684    
  // Read-only accessor interface implemented by both SecretManagerSection
  // and its Builder: four optional uint32 counters (currentId,
  // tokenSequenceNumber, numKeys, numTokens), each with a has/get pair.
  public interface SecretManagerSectionOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // optional uint32 currentId = 1;
    /**
     * <code>optional uint32 currentId = 1;</code>
     */
    boolean hasCurrentId();
    /**
     * <code>optional uint32 currentId = 1;</code>
     */
    int getCurrentId();

    // optional uint32 tokenSequenceNumber = 2;
    /**
     * <code>optional uint32 tokenSequenceNumber = 2;</code>
     */
    boolean hasTokenSequenceNumber();
    /**
     * <code>optional uint32 tokenSequenceNumber = 2;</code>
     */
    int getTokenSequenceNumber();

    // optional uint32 numKeys = 3;
    /**
     * <code>optional uint32 numKeys = 3;</code>
     */
    boolean hasNumKeys();
    /**
     * <code>optional uint32 numKeys = 3;</code>
     */
    int getNumKeys();

    // optional uint32 numTokens = 4;
    /**
     * <code>optional uint32 numTokens = 4;</code>
     *
     * <pre>
     * repeated DelegationKey keys
     * repeated PersistToken tokens
     * </pre>
     */
    boolean hasNumTokens();
    /**
     * <code>optional uint32 numTokens = 4;</code>
     *
     * <pre>
     * repeated DelegationKey keys
     * repeated PersistToken tokens
     * </pre>
     */
    int getNumTokens();
  }
17738      /**
17739       * Protobuf type {@code hadoop.hdfs.fsimage.SecretManagerSection}
17740       */
17741      public static final class SecretManagerSection extends
17742          com.google.protobuf.GeneratedMessage
17743          implements SecretManagerSectionOrBuilder {
    // Use SecretManagerSection.newBuilder() to construct.
    private SecretManagerSection(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // noInit variant: used only to create the shared default instance
    // (see the class's static initializer).
    private SecretManagerSection(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
17750    
    // Shared immutable default (all-fields-unset) instance.
    private static final SecretManagerSection defaultInstance;
    public static SecretManagerSection getDefaultInstance() {
      return defaultInstance;
    }

    public SecretManagerSection getDefaultInstanceForType() {
      return defaultInstance;
    }
17759    
    // Fields from the wire that this generated class does not recognize;
    // preserved so reserialization round-trips them.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor: reads tag/value pairs until tag 0
    // (end of stream/message). Field tags 8/16/24/32 are the varint-encoded
    // uint32 fields 1-4; anything else goes to unknownFields. Note the
    // 'default' arm's position before the 'case' labels is legal Java --
    // switch dispatch is by value, not source order.
    private SecretManagerSection(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 8: {
              bitField0_ |= 0x00000001;
              currentId_ = input.readUInt32();
              break;
            }
            case 16: {
              bitField0_ |= 0x00000002;
              tokenSequenceNumber_ = input.readUInt32();
              break;
            }
            case 24: {
              bitField0_ |= 0x00000004;
              numKeys_ = input.readUInt32();
              break;
            }
            case 32: {
              bitField0_ |= 0x00000008;
              numTokens_ = input.readUInt32();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Even on failure, keep what was parsed and seal the message.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SecretManagerSection_descriptor;
    }

    // Reflection support: binds this class and its Builder to the
    // descriptor's field accessors.
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SecretManagerSection_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.Builder.class);
    }
17832    
    // Stateless parser delegating to the wire-format parsing constructor;
    // shared by all the static parseFrom entry points.
    public static com.google.protobuf.Parser<SecretManagerSection> PARSER =
        new com.google.protobuf.AbstractParser<SecretManagerSection>() {
      public SecretManagerSection parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new SecretManagerSection(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<SecretManagerSection> getParserForType() {
      return PARSER;
    }
17847    
    // Read-only accessor interface implemented by both DelegationKey and
    // its Builder: optional id, expiryDate and raw key bytes.
    public interface DelegationKeyOrBuilder
        extends com.google.protobuf.MessageOrBuilder {

      // optional uint32 id = 1;
      /**
       * <code>optional uint32 id = 1;</code>
       */
      boolean hasId();
      /**
       * <code>optional uint32 id = 1;</code>
       */
      int getId();

      // optional uint64 expiryDate = 2;
      /**
       * <code>optional uint64 expiryDate = 2;</code>
       */
      boolean hasExpiryDate();
      /**
       * <code>optional uint64 expiryDate = 2;</code>
       */
      long getExpiryDate();

      // optional bytes key = 3;
      /**
       * <code>optional bytes key = 3;</code>
       */
      boolean hasKey();
      /**
       * <code>optional bytes key = 3;</code>
       */
      com.google.protobuf.ByteString getKey();
    }
17881        /**
17882         * Protobuf type {@code hadoop.hdfs.fsimage.SecretManagerSection.DelegationKey}
17883         */
17884        public static final class DelegationKey extends
17885            com.google.protobuf.GeneratedMessage
17886            implements DelegationKeyOrBuilder {
17887          // Use DelegationKey.newBuilder() to construct.
17888          private DelegationKey(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
17889            super(builder);
17890            this.unknownFields = builder.getUnknownFields();
17891          }
17892          private DelegationKey(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
17893    
17894          private static final DelegationKey defaultInstance;
17895          public static DelegationKey getDefaultInstance() {
17896            return defaultInstance;
17897          }
17898    
17899          public DelegationKey getDefaultInstanceForType() {
17900            return defaultInstance;
17901          }
17902    
17903          private final com.google.protobuf.UnknownFieldSet unknownFields;
17904          @java.lang.Override
17905          public final com.google.protobuf.UnknownFieldSet
17906              getUnknownFields() {
17907            return this.unknownFields;
17908          }
17909          private DelegationKey(
17910              com.google.protobuf.CodedInputStream input,
17911              com.google.protobuf.ExtensionRegistryLite extensionRegistry)
17912              throws com.google.protobuf.InvalidProtocolBufferException {
17913            initFields();
17914            int mutable_bitField0_ = 0;
17915            com.google.protobuf.UnknownFieldSet.Builder unknownFields =
17916                com.google.protobuf.UnknownFieldSet.newBuilder();
17917            try {
17918              boolean done = false;
17919              while (!done) {
17920                int tag = input.readTag();
17921                switch (tag) {
17922                  case 0:
17923                    done = true;
17924                    break;
17925                  default: {
17926                    if (!parseUnknownField(input, unknownFields,
17927                                           extensionRegistry, tag)) {
17928                      done = true;
17929                    }
17930                    break;
17931                  }
17932                  case 8: {
17933                    bitField0_ |= 0x00000001;
17934                    id_ = input.readUInt32();
17935                    break;
17936                  }
17937                  case 16: {
17938                    bitField0_ |= 0x00000002;
17939                    expiryDate_ = input.readUInt64();
17940                    break;
17941                  }
17942                  case 26: {
17943                    bitField0_ |= 0x00000004;
17944                    key_ = input.readBytes();
17945                    break;
17946                  }
17947                }
17948              }
17949            } catch (com.google.protobuf.InvalidProtocolBufferException e) {
17950              throw e.setUnfinishedMessage(this);
17951            } catch (java.io.IOException e) {
17952              throw new com.google.protobuf.InvalidProtocolBufferException(
17953                  e.getMessage()).setUnfinishedMessage(this);
17954            } finally {
17955              this.unknownFields = unknownFields.build();
17956              makeExtensionsImmutable();
17957            }
17958          }
17959          public static final com.google.protobuf.Descriptors.Descriptor
17960              getDescriptor() {
17961            return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SecretManagerSection_DelegationKey_descriptor;
17962          }
17963    
17964          protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
17965              internalGetFieldAccessorTable() {
17966            return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SecretManagerSection_DelegationKey_fieldAccessorTable
17967                .ensureFieldAccessorsInitialized(
17968                    org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.DelegationKey.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.DelegationKey.Builder.class);
17969          }
17970    
17971          public static com.google.protobuf.Parser<DelegationKey> PARSER =
17972              new com.google.protobuf.AbstractParser<DelegationKey>() {
17973            public DelegationKey parsePartialFrom(
17974                com.google.protobuf.CodedInputStream input,
17975                com.google.protobuf.ExtensionRegistryLite extensionRegistry)
17976                throws com.google.protobuf.InvalidProtocolBufferException {
17977              return new DelegationKey(input, extensionRegistry);
17978            }
17979          };
17980    
17981          @java.lang.Override
17982          public com.google.protobuf.Parser<DelegationKey> getParserForType() {
17983            return PARSER;
17984          }
17985    
17986          private int bitField0_;
17987          // optional uint32 id = 1;
17988          public static final int ID_FIELD_NUMBER = 1;
17989          private int id_;
17990          /**
17991           * <code>optional uint32 id = 1;</code>
17992           */
17993          public boolean hasId() {
17994            return ((bitField0_ & 0x00000001) == 0x00000001);
17995          }
17996          /**
17997           * <code>optional uint32 id = 1;</code>
17998           */
17999          public int getId() {
18000            return id_;
18001          }
18002    
18003          // optional uint64 expiryDate = 2;
18004          public static final int EXPIRYDATE_FIELD_NUMBER = 2;
18005          private long expiryDate_;
18006          /**
18007           * <code>optional uint64 expiryDate = 2;</code>
18008           */
18009          public boolean hasExpiryDate() {
18010            return ((bitField0_ & 0x00000002) == 0x00000002);
18011          }
18012          /**
18013           * <code>optional uint64 expiryDate = 2;</code>
18014           */
18015          public long getExpiryDate() {
18016            return expiryDate_;
18017          }
18018    
18019          // optional bytes key = 3;
18020          public static final int KEY_FIELD_NUMBER = 3;
18021          private com.google.protobuf.ByteString key_;
18022          /**
18023           * <code>optional bytes key = 3;</code>
18024           */
18025          public boolean hasKey() {
18026            return ((bitField0_ & 0x00000004) == 0x00000004);
18027          }
18028          /**
18029           * <code>optional bytes key = 3;</code>
18030           */
18031          public com.google.protobuf.ByteString getKey() {
18032            return key_;
18033          }
18034    
18035          private void initFields() {
18036            id_ = 0;
18037            expiryDate_ = 0L;
18038            key_ = com.google.protobuf.ByteString.EMPTY;
18039          }
18040          private byte memoizedIsInitialized = -1;
18041          public final boolean isInitialized() {
18042            byte isInitialized = memoizedIsInitialized;
18043            if (isInitialized != -1) return isInitialized == 1;
18044    
18045            memoizedIsInitialized = 1;
18046            return true;
18047          }
18048    
18049          public void writeTo(com.google.protobuf.CodedOutputStream output)
18050                              throws java.io.IOException {
18051            getSerializedSize();
18052            if (((bitField0_ & 0x00000001) == 0x00000001)) {
18053              output.writeUInt32(1, id_);
18054            }
18055            if (((bitField0_ & 0x00000002) == 0x00000002)) {
18056              output.writeUInt64(2, expiryDate_);
18057            }
18058            if (((bitField0_ & 0x00000004) == 0x00000004)) {
18059              output.writeBytes(3, key_);
18060            }
18061            getUnknownFields().writeTo(output);
18062          }
18063    
18064          private int memoizedSerializedSize = -1;
18065          public int getSerializedSize() {
18066            int size = memoizedSerializedSize;
18067            if (size != -1) return size;
18068    
18069            size = 0;
18070            if (((bitField0_ & 0x00000001) == 0x00000001)) {
18071              size += com.google.protobuf.CodedOutputStream
18072                .computeUInt32Size(1, id_);
18073            }
18074            if (((bitField0_ & 0x00000002) == 0x00000002)) {
18075              size += com.google.protobuf.CodedOutputStream
18076                .computeUInt64Size(2, expiryDate_);
18077            }
18078            if (((bitField0_ & 0x00000004) == 0x00000004)) {
18079              size += com.google.protobuf.CodedOutputStream
18080                .computeBytesSize(3, key_);
18081            }
18082            size += getUnknownFields().getSerializedSize();
18083            memoizedSerializedSize = size;
18084            return size;
18085          }
18086    
18087          private static final long serialVersionUID = 0L;
18088          @java.lang.Override
18089          protected java.lang.Object writeReplace()
18090              throws java.io.ObjectStreamException {
18091            return super.writeReplace();
18092          }
18093    
18094          public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.DelegationKey parseFrom(
18095              com.google.protobuf.ByteString data)
18096              throws com.google.protobuf.InvalidProtocolBufferException {
18097            return PARSER.parseFrom(data);
18098          }
18099          public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.DelegationKey parseFrom(
18100              com.google.protobuf.ByteString data,
18101              com.google.protobuf.ExtensionRegistryLite extensionRegistry)
18102              throws com.google.protobuf.InvalidProtocolBufferException {
18103            return PARSER.parseFrom(data, extensionRegistry);
18104          }
18105          public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.DelegationKey parseFrom(byte[] data)
18106              throws com.google.protobuf.InvalidProtocolBufferException {
18107            return PARSER.parseFrom(data);
18108          }
18109          public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.DelegationKey parseFrom(
18110              byte[] data,
18111              com.google.protobuf.ExtensionRegistryLite extensionRegistry)
18112              throws com.google.protobuf.InvalidProtocolBufferException {
18113            return PARSER.parseFrom(data, extensionRegistry);
18114          }
18115          public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.DelegationKey parseFrom(java.io.InputStream input)
18116              throws java.io.IOException {
18117            return PARSER.parseFrom(input);
18118          }
18119          public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.DelegationKey parseFrom(
18120              java.io.InputStream input,
18121              com.google.protobuf.ExtensionRegistryLite extensionRegistry)
18122              throws java.io.IOException {
18123            return PARSER.parseFrom(input, extensionRegistry);
18124          }
18125          public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.DelegationKey parseDelimitedFrom(java.io.InputStream input)
18126              throws java.io.IOException {
18127            return PARSER.parseDelimitedFrom(input);
18128          }
18129          public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.DelegationKey parseDelimitedFrom(
18130              java.io.InputStream input,
18131              com.google.protobuf.ExtensionRegistryLite extensionRegistry)
18132              throws java.io.IOException {
18133            return PARSER.parseDelimitedFrom(input, extensionRegistry);
18134          }
18135          public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.DelegationKey parseFrom(
18136              com.google.protobuf.CodedInputStream input)
18137              throws java.io.IOException {
18138            return PARSER.parseFrom(input);
18139          }
18140          public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.DelegationKey parseFrom(
18141              com.google.protobuf.CodedInputStream input,
18142              com.google.protobuf.ExtensionRegistryLite extensionRegistry)
18143              throws java.io.IOException {
18144            return PARSER.parseFrom(input, extensionRegistry);
18145          }
18146    
18147          public static Builder newBuilder() { return Builder.create(); }
18148          public Builder newBuilderForType() { return newBuilder(); }
18149          public static Builder newBuilder(org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.DelegationKey prototype) {
18150            return newBuilder().mergeFrom(prototype);
18151          }
18152          public Builder toBuilder() { return newBuilder(this); }
18153    
18154          @java.lang.Override
18155          protected Builder newBuilderForType(
18156              com.google.protobuf.GeneratedMessage.BuilderParent parent) {
18157            Builder builder = new Builder(parent);
18158            return builder;
18159          }
18160          /**
18161           * Protobuf type {@code hadoop.hdfs.fsimage.SecretManagerSection.DelegationKey}
18162           */
18163          public static final class Builder extends
18164              com.google.protobuf.GeneratedMessage.Builder<Builder>
18165             implements org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.DelegationKeyOrBuilder {
18166            public static final com.google.protobuf.Descriptors.Descriptor
18167                getDescriptor() {
18168              return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SecretManagerSection_DelegationKey_descriptor;
18169            }
18170    
18171            protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
18172                internalGetFieldAccessorTable() {
18173              return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SecretManagerSection_DelegationKey_fieldAccessorTable
18174                  .ensureFieldAccessorsInitialized(
18175                      org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.DelegationKey.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.DelegationKey.Builder.class);
18176            }
18177    
18178            // Construct using org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.DelegationKey.newBuilder()
18179            private Builder() {
18180              maybeForceBuilderInitialization();
18181            }
18182    
18183            private Builder(
18184                com.google.protobuf.GeneratedMessage.BuilderParent parent) {
18185              super(parent);
18186              maybeForceBuilderInitialization();
18187            }
18188            private void maybeForceBuilderInitialization() {
18189              if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
18190              }
18191            }
18192            private static Builder create() {
18193              return new Builder();
18194            }
18195    
18196            public Builder clear() {
18197              super.clear();
18198              id_ = 0;
18199              bitField0_ = (bitField0_ & ~0x00000001);
18200              expiryDate_ = 0L;
18201              bitField0_ = (bitField0_ & ~0x00000002);
18202              key_ = com.google.protobuf.ByteString.EMPTY;
18203              bitField0_ = (bitField0_ & ~0x00000004);
18204              return this;
18205            }
18206    
18207            public Builder clone() {
18208              return create().mergeFrom(buildPartial());
18209            }
18210    
18211            public com.google.protobuf.Descriptors.Descriptor
18212                getDescriptorForType() {
18213              return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SecretManagerSection_DelegationKey_descriptor;
18214            }
18215    
18216            public org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.DelegationKey getDefaultInstanceForType() {
18217              return org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.DelegationKey.getDefaultInstance();
18218            }
18219    
18220            public org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.DelegationKey build() {
18221              org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.DelegationKey result = buildPartial();
18222              if (!result.isInitialized()) {
18223                throw newUninitializedMessageException(result);
18224              }
18225              return result;
18226            }
18227    
18228            public org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.DelegationKey buildPartial() {
18229              org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.DelegationKey result = new org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.DelegationKey(this);
18230              int from_bitField0_ = bitField0_;
18231              int to_bitField0_ = 0;
18232              if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
18233                to_bitField0_ |= 0x00000001;
18234              }
18235              result.id_ = id_;
18236              if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
18237                to_bitField0_ |= 0x00000002;
18238              }
18239              result.expiryDate_ = expiryDate_;
18240              if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
18241                to_bitField0_ |= 0x00000004;
18242              }
18243              result.key_ = key_;
18244              result.bitField0_ = to_bitField0_;
18245              onBuilt();
18246              return result;
18247            }
18248    
18249            public Builder mergeFrom(com.google.protobuf.Message other) {
18250              if (other instanceof org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.DelegationKey) {
18251                return mergeFrom((org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.DelegationKey)other);
18252              } else {
18253                super.mergeFrom(other);
18254                return this;
18255              }
18256            }
18257    
18258            public Builder mergeFrom(org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.DelegationKey other) {
18259              if (other == org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.DelegationKey.getDefaultInstance()) return this;
18260              if (other.hasId()) {
18261                setId(other.getId());
18262              }
18263              if (other.hasExpiryDate()) {
18264                setExpiryDate(other.getExpiryDate());
18265              }
18266              if (other.hasKey()) {
18267                setKey(other.getKey());
18268              }
18269              this.mergeUnknownFields(other.getUnknownFields());
18270              return this;
18271            }
18272    
18273            public final boolean isInitialized() {
18274              return true;
18275            }
18276    
18277            public Builder mergeFrom(
18278                com.google.protobuf.CodedInputStream input,
18279                com.google.protobuf.ExtensionRegistryLite extensionRegistry)
18280                throws java.io.IOException {
18281              org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.DelegationKey parsedMessage = null;
18282              try {
18283                parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
18284              } catch (com.google.protobuf.InvalidProtocolBufferException e) {
18285                parsedMessage = (org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.DelegationKey) e.getUnfinishedMessage();
18286                throw e;
18287              } finally {
18288                if (parsedMessage != null) {
18289                  mergeFrom(parsedMessage);
18290                }
18291              }
18292              return this;
18293            }
18294            private int bitField0_;
18295    
18296            // optional uint32 id = 1;
18297            private int id_ ;
18298            /**
18299             * <code>optional uint32 id = 1;</code>
18300             */
18301            public boolean hasId() {
18302              return ((bitField0_ & 0x00000001) == 0x00000001);
18303            }
18304            /**
18305             * <code>optional uint32 id = 1;</code>
18306             */
18307            public int getId() {
18308              return id_;
18309            }
18310            /**
18311             * <code>optional uint32 id = 1;</code>
18312             */
18313            public Builder setId(int value) {
18314              bitField0_ |= 0x00000001;
18315              id_ = value;
18316              onChanged();
18317              return this;
18318            }
18319            /**
18320             * <code>optional uint32 id = 1;</code>
18321             */
18322            public Builder clearId() {
18323              bitField0_ = (bitField0_ & ~0x00000001);
18324              id_ = 0;
18325              onChanged();
18326              return this;
18327            }
18328    
18329            // optional uint64 expiryDate = 2;
18330            private long expiryDate_ ;
18331            /**
18332             * <code>optional uint64 expiryDate = 2;</code>
18333             */
18334            public boolean hasExpiryDate() {
18335              return ((bitField0_ & 0x00000002) == 0x00000002);
18336            }
18337            /**
18338             * <code>optional uint64 expiryDate = 2;</code>
18339             */
18340            public long getExpiryDate() {
18341              return expiryDate_;
18342            }
18343            /**
18344             * <code>optional uint64 expiryDate = 2;</code>
18345             */
18346            public Builder setExpiryDate(long value) {
18347              bitField0_ |= 0x00000002;
18348              expiryDate_ = value;
18349              onChanged();
18350              return this;
18351            }
18352            /**
18353             * <code>optional uint64 expiryDate = 2;</code>
18354             */
18355            public Builder clearExpiryDate() {
18356              bitField0_ = (bitField0_ & ~0x00000002);
18357              expiryDate_ = 0L;
18358              onChanged();
18359              return this;
18360            }
18361    
18362            // optional bytes key = 3;
18363            private com.google.protobuf.ByteString key_ = com.google.protobuf.ByteString.EMPTY;
18364            /**
18365             * <code>optional bytes key = 3;</code>
18366             */
18367            public boolean hasKey() {
18368              return ((bitField0_ & 0x00000004) == 0x00000004);
18369            }
18370            /**
18371             * <code>optional bytes key = 3;</code>
18372             */
18373            public com.google.protobuf.ByteString getKey() {
18374              return key_;
18375            }
18376            /**
18377             * <code>optional bytes key = 3;</code>
18378             */
18379            public Builder setKey(com.google.protobuf.ByteString value) {
18380              if (value == null) {
18381        throw new NullPointerException();
18382      }
18383      bitField0_ |= 0x00000004;
18384              key_ = value;
18385              onChanged();
18386              return this;
18387            }
18388            /**
18389             * <code>optional bytes key = 3;</code>
18390             */
18391            public Builder clearKey() {
18392              bitField0_ = (bitField0_ & ~0x00000004);
18393              key_ = getDefaultInstance().getKey();
18394              onChanged();
18395              return this;
18396            }
18397    
18398            // @@protoc_insertion_point(builder_scope:hadoop.hdfs.fsimage.SecretManagerSection.DelegationKey)
18399          }
18400    
18401          static {
18402            defaultInstance = new DelegationKey(true);
18403            defaultInstance.initFields();
18404          }
18405    
18406          // @@protoc_insertion_point(class_scope:hadoop.hdfs.fsimage.SecretManagerSection.DelegationKey)
18407        }
18408    
    /**
     * Read-only accessor interface implemented by both
     * {@code SecretManagerSection.PersistToken} and its {@code Builder}.
     * All fields are optional; {@code hasX()} reports explicit presence and
     * {@code getX()} returns the proto2 default when unset. String fields
     * additionally expose a raw-bytes accessor {@code getXBytes()}.
     */
    public interface PersistTokenOrBuilder
        extends com.google.protobuf.MessageOrBuilder {

      // optional uint32 version = 1;
      /**
       * <code>optional uint32 version = 1;</code>
       */
      boolean hasVersion();
      /**
       * <code>optional uint32 version = 1;</code>
       */
      int getVersion();

      // optional string owner = 2;
      /**
       * <code>optional string owner = 2;</code>
       */
      boolean hasOwner();
      /**
       * <code>optional string owner = 2;</code>
       */
      java.lang.String getOwner();
      /**
       * <code>optional string owner = 2;</code>
       */
      com.google.protobuf.ByteString
          getOwnerBytes();

      // optional string renewer = 3;
      /**
       * <code>optional string renewer = 3;</code>
       */
      boolean hasRenewer();
      /**
       * <code>optional string renewer = 3;</code>
       */
      java.lang.String getRenewer();
      /**
       * <code>optional string renewer = 3;</code>
       */
      com.google.protobuf.ByteString
          getRenewerBytes();

      // optional string realUser = 4;
      /**
       * <code>optional string realUser = 4;</code>
       */
      boolean hasRealUser();
      /**
       * <code>optional string realUser = 4;</code>
       */
      java.lang.String getRealUser();
      /**
       * <code>optional string realUser = 4;</code>
       */
      com.google.protobuf.ByteString
          getRealUserBytes();

      // optional uint64 issueDate = 5;
      /**
       * <code>optional uint64 issueDate = 5;</code>
       */
      boolean hasIssueDate();
      /**
       * <code>optional uint64 issueDate = 5;</code>
       */
      long getIssueDate();

      // optional uint64 maxDate = 6;
      /**
       * <code>optional uint64 maxDate = 6;</code>
       */
      boolean hasMaxDate();
      /**
       * <code>optional uint64 maxDate = 6;</code>
       */
      long getMaxDate();

      // optional uint32 sequenceNumber = 7;
      /**
       * <code>optional uint32 sequenceNumber = 7;</code>
       */
      boolean hasSequenceNumber();
      /**
       * <code>optional uint32 sequenceNumber = 7;</code>
       */
      int getSequenceNumber();

      // optional uint32 masterKeyId = 8;
      /**
       * <code>optional uint32 masterKeyId = 8;</code>
       */
      boolean hasMasterKeyId();
      /**
       * <code>optional uint32 masterKeyId = 8;</code>
       */
      int getMasterKeyId();

      // optional uint64 expiryDate = 9;
      /**
       * <code>optional uint64 expiryDate = 9;</code>
       */
      boolean hasExpiryDate();
      /**
       * <code>optional uint64 expiryDate = 9;</code>
       */
      long getExpiryDate();
    }
18517        /**
18518         * Protobuf type {@code hadoop.hdfs.fsimage.SecretManagerSection.PersistToken}
18519         */
18520        public static final class PersistToken extends
18521            com.google.protobuf.GeneratedMessage
18522            implements PersistTokenOrBuilder {
      // Use PersistToken.newBuilder() to construct.
      private PersistToken(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
        super(builder);
        this.unknownFields = builder.getUnknownFields();
      }
      // Constructor for the shared default instance: empty unknown-field set;
      // fields presumably initialized via initFields() in a static block
      // (same pattern as DelegationKey) — not visible in this chunk.
      private PersistToken(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

      // Shared default (all-fields-unset) instance.
      private static final PersistToken defaultInstance;
      public static PersistToken getDefaultInstance() {
        return defaultInstance;
      }

      public PersistToken getDefaultInstanceForType() {
        return defaultInstance;
      }

      // Unrecognized wire-format fields preserved for round-tripping.
      private final com.google.protobuf.UnknownFieldSet unknownFields;
      @java.lang.Override
      public final com.google.protobuf.UnknownFieldSet
          getUnknownFields() {
        return this.unknownFields;
      }
      // Wire-format parsing constructor used by PARSER: reads tag/value pairs
      // until end of input (tag 0); unrecognized tags fall into the default
      // arm and are preserved in unknownFields. Fields 1..9 are version,
      // owner, renewer, realUser, issueDate, maxDate, sequenceNumber,
      // masterKeyId and expiryDate respectively.
      private PersistToken(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        initFields();
        int mutable_bitField0_ = 0;
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
            com.google.protobuf.UnknownFieldSet.newBuilder();
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              // NOTE: default arm precedes the field cases in generated code;
              // case matching is unaffected since every arm breaks.
              default: {
                if (!parseUnknownField(input, unknownFields,
                                       extensionRegistry, tag)) {
                  done = true;
                }
                break;
              }
              case 8: {
                bitField0_ |= 0x00000001;
                version_ = input.readUInt32();
                break;
              }
              case 18: {
                bitField0_ |= 0x00000002;
                owner_ = input.readBytes();
                break;
              }
              case 26: {
                bitField0_ |= 0x00000004;
                renewer_ = input.readBytes();
                break;
              }
              case 34: {
                bitField0_ |= 0x00000008;
                realUser_ = input.readBytes();
                break;
              }
              case 40: {
                bitField0_ |= 0x00000010;
                issueDate_ = input.readUInt64();
                break;
              }
              case 48: {
                bitField0_ |= 0x00000020;
                maxDate_ = input.readUInt64();
                break;
              }
              case 56: {
                bitField0_ |= 0x00000040;
                sequenceNumber_ = input.readUInt32();
                break;
              }
              case 64: {
                bitField0_ |= 0x00000080;
                masterKeyId_ = input.readUInt32();
                break;
              }
              case 72: {
                bitField0_ |= 0x00000100;
                expiryDate_ = input.readUInt64();
                break;
              }
            }
          }
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(this);
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(
              e.getMessage()).setUnfinishedMessage(this);
        } finally {
          // Always freeze whatever was parsed, even on error, so the
          // unfinished message attached to the exception is usable.
          this.unknownFields = unknownFields.build();
          makeExtensionsImmutable();
        }
      }
      /**
       * Returns the protobuf {@code Descriptor} for the
       * {@code hadoop.hdfs.fsimage.SecretManagerSection.PersistToken} message type.
       */
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SecretManagerSection_PersistToken_descriptor;
      }
18629    
      /**
       * Returns the reflection accessor table mapping descriptor fields to the
       * generated getters/setters of {@code PersistToken} and its Builder.
       */
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SecretManagerSection_PersistToken_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.PersistToken.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.PersistToken.Builder.class);
      }
18636    
      /**
       * Shared parser for {@code PersistToken}; each call delegates to the
       * parsing constructor.
       *
       * NOTE(review): this generated field is public, static and non-final —
       * a known wart of this protobuf-java generator version. Do not assign
       * to it; making it {@code final} here would change the public surface
       * of generated code, so it is left as emitted.
       */
      public static com.google.protobuf.Parser<PersistToken> PARSER =
          new com.google.protobuf.AbstractParser<PersistToken>() {
        public PersistToken parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          return new PersistToken(input, extensionRegistry);
        }
      };
18646    
      /** Returns the shared {@link #PARSER} instance for this message type. */
      @java.lang.Override
      public com.google.protobuf.Parser<PersistToken> getParserForType() {
        return PARSER;
      }
18651    
      // Presence bits: bit N set <=> field with mask (1 << N) was explicitly set.
      private int bitField0_;
      // optional uint32 version = 1;
      public static final int VERSION_FIELD_NUMBER = 1;
      private int version_;
      /**
       * <code>optional uint32 version = 1;</code>
       */
      public boolean hasVersion() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>optional uint32 version = 1;</code>
       */
      public int getVersion() {
        return version_;
      }

      // optional string owner = 2;
      public static final int OWNER_FIELD_NUMBER = 2;
      // Holds either a java.lang.String or a ByteString; lazily converted and
      // cached in whichever direction is requested.
      private java.lang.Object owner_;
      /**
       * <code>optional string owner = 2;</code>
       */
      public boolean hasOwner() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>optional string owner = 2;</code>
       *
       * Decodes the cached ByteString as UTF-8 on first call; the decoded
       * String is cached back into {@code owner_} only when the bytes are
       * valid UTF-8 (invalid input is re-decoded on each call).
       */
      public java.lang.String getOwner() {
        java.lang.Object ref = owner_;
        if (ref instanceof java.lang.String) {
          return (java.lang.String) ref;
        } else {
          com.google.protobuf.ByteString bs = 
              (com.google.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            owner_ = s;
          }
          return s;
        }
      }
      /**
       * <code>optional string owner = 2;</code>
       *
       * Returns the UTF-8 bytes of the owner, caching the encoded form.
       */
      public com.google.protobuf.ByteString
          getOwnerBytes() {
        java.lang.Object ref = owner_;
        if (ref instanceof java.lang.String) {
          com.google.protobuf.ByteString b = 
              com.google.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          owner_ = b;
          return b;
        } else {
          return (com.google.protobuf.ByteString) ref;
        }
      }
18711    
      // optional string renewer = 3;
      public static final int RENEWER_FIELD_NUMBER = 3;
      // String/ByteString dual cache, same scheme as owner_.
      private java.lang.Object renewer_;
      /**
       * <code>optional string renewer = 3;</code>
       */
      public boolean hasRenewer() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      /**
       * <code>optional string renewer = 3;</code>
       *
       * Lazily decodes and (for valid UTF-8) caches the String form.
       */
      public java.lang.String getRenewer() {
        java.lang.Object ref = renewer_;
        if (ref instanceof java.lang.String) {
          return (java.lang.String) ref;
        } else {
          com.google.protobuf.ByteString bs = 
              (com.google.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            renewer_ = s;
          }
          return s;
        }
      }
      /**
       * <code>optional string renewer = 3;</code>
       *
       * Returns the UTF-8 bytes of the renewer, caching the encoded form.
       */
      public com.google.protobuf.ByteString
          getRenewerBytes() {
        java.lang.Object ref = renewer_;
        if (ref instanceof java.lang.String) {
          com.google.protobuf.ByteString b = 
              com.google.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          renewer_ = b;
          return b;
        } else {
          return (com.google.protobuf.ByteString) ref;
        }
      }
18754    
      // optional string realUser = 4;
      public static final int REALUSER_FIELD_NUMBER = 4;
      // String/ByteString dual cache, same scheme as owner_.
      private java.lang.Object realUser_;
      /**
       * <code>optional string realUser = 4;</code>
       */
      public boolean hasRealUser() {
        return ((bitField0_ & 0x00000008) == 0x00000008);
      }
      /**
       * <code>optional string realUser = 4;</code>
       *
       * Lazily decodes and (for valid UTF-8) caches the String form.
       */
      public java.lang.String getRealUser() {
        java.lang.Object ref = realUser_;
        if (ref instanceof java.lang.String) {
          return (java.lang.String) ref;
        } else {
          com.google.protobuf.ByteString bs = 
              (com.google.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            realUser_ = s;
          }
          return s;
        }
      }
      /**
       * <code>optional string realUser = 4;</code>
       *
       * Returns the UTF-8 bytes of the real user, caching the encoded form.
       */
      public com.google.protobuf.ByteString
          getRealUserBytes() {
        java.lang.Object ref = realUser_;
        if (ref instanceof java.lang.String) {
          com.google.protobuf.ByteString b = 
              com.google.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          realUser_ = b;
          return b;
        } else {
          return (com.google.protobuf.ByteString) ref;
        }
      }
18797    
      // Scalar fields 5-9. Each has*() method tests the matching presence bit
      // in bitField0_; each get*() returns the raw stored value (proto uint32
      // maps to Java int, uint64 to long — values are unsigned on the wire).
      // optional uint64 issueDate = 5;
      public static final int ISSUEDATE_FIELD_NUMBER = 5;
      private long issueDate_;
      /**
       * <code>optional uint64 issueDate = 5;</code>
       */
      public boolean hasIssueDate() {
        return ((bitField0_ & 0x00000010) == 0x00000010);
      }
      /**
       * <code>optional uint64 issueDate = 5;</code>
       */
      public long getIssueDate() {
        return issueDate_;
      }

      // optional uint64 maxDate = 6;
      public static final int MAXDATE_FIELD_NUMBER = 6;
      private long maxDate_;
      /**
       * <code>optional uint64 maxDate = 6;</code>
       */
      public boolean hasMaxDate() {
        return ((bitField0_ & 0x00000020) == 0x00000020);
      }
      /**
       * <code>optional uint64 maxDate = 6;</code>
       */
      public long getMaxDate() {
        return maxDate_;
      }

      // optional uint32 sequenceNumber = 7;
      public static final int SEQUENCENUMBER_FIELD_NUMBER = 7;
      private int sequenceNumber_;
      /**
       * <code>optional uint32 sequenceNumber = 7;</code>
       */
      public boolean hasSequenceNumber() {
        return ((bitField0_ & 0x00000040) == 0x00000040);
      }
      /**
       * <code>optional uint32 sequenceNumber = 7;</code>
       */
      public int getSequenceNumber() {
        return sequenceNumber_;
      }

      // optional uint32 masterKeyId = 8;
      public static final int MASTERKEYID_FIELD_NUMBER = 8;
      private int masterKeyId_;
      /**
       * <code>optional uint32 masterKeyId = 8;</code>
       */
      public boolean hasMasterKeyId() {
        return ((bitField0_ & 0x00000080) == 0x00000080);
      }
      /**
       * <code>optional uint32 masterKeyId = 8;</code>
       */
      public int getMasterKeyId() {
        return masterKeyId_;
      }

      // optional uint64 expiryDate = 9;
      public static final int EXPIRYDATE_FIELD_NUMBER = 9;
      private long expiryDate_;
      /**
       * <code>optional uint64 expiryDate = 9;</code>
       */
      public boolean hasExpiryDate() {
        return ((bitField0_ & 0x00000100) == 0x00000100);
      }
      /**
       * <code>optional uint64 expiryDate = 9;</code>
       */
      public long getExpiryDate() {
        return expiryDate_;
      }
18877    
      /**
       * Resets every field to its proto default ({@code 0} / {@code ""} /
       * {@code 0L}); invoked by the parsing constructor before wire data
       * is merged in.
       */
      private void initFields() {
        version_ = 0;
        owner_ = "";
        renewer_ = "";
        realUser_ = "";
        issueDate_ = 0L;
        maxDate_ = 0L;
        sequenceNumber_ = 0;
        masterKeyId_ = 0;
        expiryDate_ = 0L;
      }
      // Memoized isInitialized result: -1 = not yet computed, 1 = true, 0 = false.
      private byte memoizedIsInitialized = -1;
      /**
       * Always returns {@code true}: every field of PersistToken is optional,
       * so there are no required-field checks to perform. The result is still
       * memoized to follow the standard generated-code pattern.
       */
      public final boolean isInitialized() {
        byte isInitialized = memoizedIsInitialized;
        if (isInitialized != -1) return isInitialized == 1;

        memoizedIsInitialized = 1;
        return true;
      }
18897    
      /**
       * Serializes every field whose presence bit is set, in ascending
       * field-number order (1..9), followed by any unknown fields that were
       * preserved at parse time.
       *
       * <p>{@code getSerializedSize()} is called first so that memoized sizes
       * are computed before writing.
       */
      public void writeTo(com.google.protobuf.CodedOutputStream output)
                          throws java.io.IOException {
        getSerializedSize();
        if (((bitField0_ & 0x00000001) == 0x00000001)) {
          output.writeUInt32(1, version_);
        }
        if (((bitField0_ & 0x00000002) == 0x00000002)) {
          output.writeBytes(2, getOwnerBytes());
        }
        if (((bitField0_ & 0x00000004) == 0x00000004)) {
          output.writeBytes(3, getRenewerBytes());
        }
        if (((bitField0_ & 0x00000008) == 0x00000008)) {
          output.writeBytes(4, getRealUserBytes());
        }
        if (((bitField0_ & 0x00000010) == 0x00000010)) {
          output.writeUInt64(5, issueDate_);
        }
        if (((bitField0_ & 0x00000020) == 0x00000020)) {
          output.writeUInt64(6, maxDate_);
        }
        if (((bitField0_ & 0x00000040) == 0x00000040)) {
          output.writeUInt32(7, sequenceNumber_);
        }
        if (((bitField0_ & 0x00000080) == 0x00000080)) {
          output.writeUInt32(8, masterKeyId_);
        }
        if (((bitField0_ & 0x00000100) == 0x00000100)) {
          output.writeUInt64(9, expiryDate_);
        }
        getUnknownFields().writeTo(output);
      }
18930    
      // Cached serialized size; -1 means "not yet computed".
      private int memoizedSerializedSize = -1;
      /**
       * Computes (and memoizes) the wire size in bytes of this message:
       * the sum of each present field's encoded size plus the size of any
       * preserved unknown fields. Mirrors the field order of writeTo().
       */
      public int getSerializedSize() {
        int size = memoizedSerializedSize;
        if (size != -1) return size;

        size = 0;
        if (((bitField0_ & 0x00000001) == 0x00000001)) {
          size += com.google.protobuf.CodedOutputStream
            .computeUInt32Size(1, version_);
        }
        if (((bitField0_ & 0x00000002) == 0x00000002)) {
          size += com.google.protobuf.CodedOutputStream
            .computeBytesSize(2, getOwnerBytes());
        }
        if (((bitField0_ & 0x00000004) == 0x00000004)) {
          size += com.google.protobuf.CodedOutputStream
            .computeBytesSize(3, getRenewerBytes());
        }
        if (((bitField0_ & 0x00000008) == 0x00000008)) {
          size += com.google.protobuf.CodedOutputStream
            .computeBytesSize(4, getRealUserBytes());
        }
        if (((bitField0_ & 0x00000010) == 0x00000010)) {
          size += com.google.protobuf.CodedOutputStream
            .computeUInt64Size(5, issueDate_);
        }
        if (((bitField0_ & 0x00000020) == 0x00000020)) {
          size += com.google.protobuf.CodedOutputStream
            .computeUInt64Size(6, maxDate_);
        }
        if (((bitField0_ & 0x00000040) == 0x00000040)) {
          size += com.google.protobuf.CodedOutputStream
            .computeUInt32Size(7, sequenceNumber_);
        }
        if (((bitField0_ & 0x00000080) == 0x00000080)) {
          size += com.google.protobuf.CodedOutputStream
            .computeUInt32Size(8, masterKeyId_);
        }
        if (((bitField0_ & 0x00000100) == 0x00000100)) {
          size += com.google.protobuf.CodedOutputStream
            .computeUInt64Size(9, expiryDate_);
        }
        size += getUnknownFields().getSerializedSize();
        memoizedSerializedSize = size;
        return size;
      }
18977    
      private static final long serialVersionUID = 0L;
      /**
       * Java-serialization hook; delegates to the superclass implementation.
       */
      @java.lang.Override
      protected java.lang.Object writeReplace()
          throws java.io.ObjectStreamException {
        return super.writeReplace();
      }
18984    
      // Static parseFrom / parseDelimitedFrom overloads: thin wrappers that
      // delegate to PARSER for each supported input type (ByteString, byte[],
      // InputStream, CodedInputStream), with and without an extension registry.
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.PersistToken parseFrom(
          com.google.protobuf.ByteString data)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.PersistToken parseFrom(
          com.google.protobuf.ByteString data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.PersistToken parseFrom(byte[] data)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.PersistToken parseFrom(
          byte[] data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.PersistToken parseFrom(java.io.InputStream input)
          throws java.io.IOException {
        return PARSER.parseFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.PersistToken parseFrom(
          java.io.InputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseFrom(input, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.PersistToken parseDelimitedFrom(java.io.InputStream input)
          throws java.io.IOException {
        return PARSER.parseDelimitedFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.PersistToken parseDelimitedFrom(
          java.io.InputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseDelimitedFrom(input, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.PersistToken parseFrom(
          com.google.protobuf.CodedInputStream input)
          throws java.io.IOException {
        return PARSER.parseFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.PersistToken parseFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseFrom(input, extensionRegistry);
      }
19037    
      /** Creates a new, empty Builder for this message type. */
      public static Builder newBuilder() { return Builder.create(); }
      /** Instance-level builder factory; equivalent to {@link #newBuilder()}. */
      public Builder newBuilderForType() { return newBuilder(); }
      /** Creates a Builder pre-populated with {@code prototype}'s fields. */
      public static Builder newBuilder(org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.PersistToken prototype) {
        return newBuilder().mergeFrom(prototype);
      }
      /** Creates a Builder pre-populated with this message's fields. */
      public Builder toBuilder() { return newBuilder(this); }

      /** Creates a Builder attached to {@code parent} for invalidation callbacks. */
      @java.lang.Override
      protected Builder newBuilderForType(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        Builder builder = new Builder(parent);
        return builder;
      }
19051          /**
19052           * Protobuf type {@code hadoop.hdfs.fsimage.SecretManagerSection.PersistToken}
19053           */
19054          public static final class Builder extends
19055              com.google.protobuf.GeneratedMessage.Builder<Builder>
19056             implements org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.PersistTokenOrBuilder {
        /** Returns the descriptor for the PersistToken message type. */
        public static final com.google.protobuf.Descriptors.Descriptor
            getDescriptor() {
          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SecretManagerSection_PersistToken_descriptor;
        }
19061    
        /** Returns the reflection accessor table shared with the message class. */
        protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
            internalGetFieldAccessorTable() {
          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SecretManagerSection_PersistToken_fieldAccessorTable
              .ensureFieldAccessorsInitialized(
                  org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.PersistToken.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.PersistToken.Builder.class);
        }
19068    
        // Construct using org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.PersistToken.newBuilder()
        private Builder() {
          maybeForceBuilderInitialization();
        }

        // Constructor used when this builder is nested under a parent builder.
        private Builder(
            com.google.protobuf.GeneratedMessage.BuilderParent parent) {
          super(parent);
          maybeForceBuilderInitialization();
        }
        // Hook for eagerly creating nested-field builders; PersistToken has no
        // message-typed fields, so the body is empty.
        private void maybeForceBuilderInitialization() {
          if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          }
        }
        // Factory used by PersistToken.newBuilder().
        private static Builder create() {
          return new Builder();
        }
19086    
        /**
         * Resets every field to its proto default and clears all presence
         * bits in {@code bitField0_}, returning this builder for chaining.
         */
        public Builder clear() {
          super.clear();
          version_ = 0;
          bitField0_ = (bitField0_ & ~0x00000001);
          owner_ = "";
          bitField0_ = (bitField0_ & ~0x00000002);
          renewer_ = "";
          bitField0_ = (bitField0_ & ~0x00000004);
          realUser_ = "";
          bitField0_ = (bitField0_ & ~0x00000008);
          issueDate_ = 0L;
          bitField0_ = (bitField0_ & ~0x00000010);
          maxDate_ = 0L;
          bitField0_ = (bitField0_ & ~0x00000020);
          sequenceNumber_ = 0;
          bitField0_ = (bitField0_ & ~0x00000040);
          masterKeyId_ = 0;
          bitField0_ = (bitField0_ & ~0x00000080);
          expiryDate_ = 0L;
          bitField0_ = (bitField0_ & ~0x00000100);
          return this;
        }
19109    
        /** Returns an independent copy of this builder's current state. */
        public Builder clone() {
          return create().mergeFrom(buildPartial());
        }

        /** Returns the descriptor for the message type this builder produces. */
        public com.google.protobuf.Descriptors.Descriptor
            getDescriptorForType() {
          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SecretManagerSection_PersistToken_descriptor;
        }

        /** Returns the shared default (all-fields-unset) PersistToken instance. */
        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.PersistToken getDefaultInstanceForType() {
          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.PersistToken.getDefaultInstance();
        }
19122    
        /**
         * Builds the message, throwing if it is not fully initialized.
         * (PersistToken has no required fields, so the check always passes.)
         */
        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.PersistToken build() {
          org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.PersistToken result = buildPartial();
          if (!result.isInitialized()) {
            throw newUninitializedMessageException(result);
          }
          return result;
        }
19130    
        /**
         * Builds the message without an initialization check, copying each
         * field value into the result and translating the builder's presence
         * bits into the message's {@code bitField0_}. Field values are copied
         * unconditionally; only the presence bits are gated.
         */
        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.PersistToken buildPartial() {
          org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.PersistToken result = new org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.PersistToken(this);
          int from_bitField0_ = bitField0_;
          int to_bitField0_ = 0;
          if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
            to_bitField0_ |= 0x00000001;
          }
          result.version_ = version_;
          if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
            to_bitField0_ |= 0x00000002;
          }
          result.owner_ = owner_;
          if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
            to_bitField0_ |= 0x00000004;
          }
          result.renewer_ = renewer_;
          if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
            to_bitField0_ |= 0x00000008;
          }
          result.realUser_ = realUser_;
          if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
            to_bitField0_ |= 0x00000010;
          }
          result.issueDate_ = issueDate_;
          if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
            to_bitField0_ |= 0x00000020;
          }
          result.maxDate_ = maxDate_;
          if (((from_bitField0_ & 0x00000040) == 0x00000040)) {
            to_bitField0_ |= 0x00000040;
          }
          result.sequenceNumber_ = sequenceNumber_;
          if (((from_bitField0_ & 0x00000080) == 0x00000080)) {
            to_bitField0_ |= 0x00000080;
          }
          result.masterKeyId_ = masterKeyId_;
          if (((from_bitField0_ & 0x00000100) == 0x00000100)) {
            to_bitField0_ |= 0x00000100;
          }
          result.expiryDate_ = expiryDate_;
          result.bitField0_ = to_bitField0_;
          onBuilt();
          return result;
        }
19175    
        /**
         * Type-dispatching merge: uses the fast typed overload for
         * PersistToken instances, otherwise falls back to reflective merging.
         */
        public Builder mergeFrom(com.google.protobuf.Message other) {
          if (other instanceof org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.PersistToken) {
            return mergeFrom((org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.PersistToken)other);
          } else {
            super.mergeFrom(other);
            return this;
          }
        }
19184    
        /**
         * Merges {@code other} into this builder: each field present in
         * {@code other} overwrites this builder's value and sets its presence
         * bit. String fields copy the raw cached Object (String or
         * ByteString) directly to avoid forcing a UTF-8 conversion. Unknown
         * fields are merged last. No-op if {@code other} is the shared
         * default instance.
         */
        public Builder mergeFrom(org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.PersistToken other) {
          if (other == org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.PersistToken.getDefaultInstance()) return this;
          if (other.hasVersion()) {
            setVersion(other.getVersion());
          }
          if (other.hasOwner()) {
            bitField0_ |= 0x00000002;
            owner_ = other.owner_;
            onChanged();
          }
          if (other.hasRenewer()) {
            bitField0_ |= 0x00000004;
            renewer_ = other.renewer_;
            onChanged();
          }
          if (other.hasRealUser()) {
            bitField0_ |= 0x00000008;
            realUser_ = other.realUser_;
            onChanged();
          }
          if (other.hasIssueDate()) {
            setIssueDate(other.getIssueDate());
          }
          if (other.hasMaxDate()) {
            setMaxDate(other.getMaxDate());
          }
          if (other.hasSequenceNumber()) {
            setSequenceNumber(other.getSequenceNumber());
          }
          if (other.hasMasterKeyId()) {
            setMasterKeyId(other.getMasterKeyId());
          }
          if (other.hasExpiryDate()) {
            setExpiryDate(other.getExpiryDate());
          }
          this.mergeUnknownFields(other.getUnknownFields());
          return this;
        }
19223    
        /** Always true: PersistToken declares no required fields. */
        public final boolean isInitialized() {
          return true;
        }
19227    
        /**
         * Parses a PersistToken from {@code input} and merges it into this
         * builder. On a parse failure, any partially parsed message captured
         * in the exception is still merged (in the finally block) before the
         * exception propagates, so successfully read fields are retained.
         */
        public Builder mergeFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws java.io.IOException {
          org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.PersistToken parsedMessage = null;
          try {
            parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            parsedMessage = (org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.PersistToken) e.getUnfinishedMessage();
            throw e;
          } finally {
            if (parsedMessage != null) {
              mergeFrom(parsedMessage);
            }
          }
          return this;
        }
        // Builder-side presence bits; mirrors the message's bitField0_ layout.
        private int bitField0_;

        // optional uint32 version = 1;
        private int version_ ;
        /**
         * <code>optional uint32 version = 1;</code>
         */
        public boolean hasVersion() {
          return ((bitField0_ & 0x00000001) == 0x00000001);
        }
        /**
         * <code>optional uint32 version = 1;</code>
         */
        public int getVersion() {
          return version_;
        }
        /**
         * <code>optional uint32 version = 1;</code>
         *
         * Sets the value, marks the presence bit, and notifies the parent
         * builder via onChanged().
         */
        public Builder setVersion(int value) {
          bitField0_ |= 0x00000001;
          version_ = value;
          onChanged();
          return this;
        }
        /**
         * <code>optional uint32 version = 1;</code>
         *
         * Clears the presence bit and restores the proto default (0).
         */
        public Builder clearVersion() {
          bitField0_ = (bitField0_ & ~0x00000001);
          version_ = 0;
          onChanged();
          return this;
        }
19279    
        // optional string owner = 2;
        // Holds either a java.lang.String or a ByteString, converted lazily.
        private java.lang.Object owner_ = "";
        /**
         * <code>optional string owner = 2;</code>
         */
        public boolean hasOwner() {
          return ((bitField0_ & 0x00000002) == 0x00000002);
        }
        /**
         * <code>optional string owner = 2;</code>
         *
         * Decodes a cached ByteString as UTF-8 and caches the String form.
         * Unlike the message-side getter, the builder caches unconditionally
         * (no isValidUtf8 check).
         */
        public java.lang.String getOwner() {
          java.lang.Object ref = owner_;
          if (!(ref instanceof java.lang.String)) {
            java.lang.String s = ((com.google.protobuf.ByteString) ref)
                .toStringUtf8();
            owner_ = s;
            return s;
          } else {
            return (java.lang.String) ref;
          }
        }
        /**
         * <code>optional string owner = 2;</code>
         *
         * Returns the UTF-8 bytes of the owner, caching the encoded form.
         */
        public com.google.protobuf.ByteString
            getOwnerBytes() {
          java.lang.Object ref = owner_;
          if (ref instanceof String) {
            com.google.protobuf.ByteString b = 
                com.google.protobuf.ByteString.copyFromUtf8(
                    (java.lang.String) ref);
            owner_ = b;
            return b;
          } else {
            return (com.google.protobuf.ByteString) ref;
          }
        }
19318            /**
19319             * <code>optional string owner = 2;</code>
19320             */
19321            public Builder setOwner(
19322                java.lang.String value) {
19323              if (value == null) {
19324        throw new NullPointerException();
19325      }
19326      bitField0_ |= 0x00000002;
19327              owner_ = value;
19328              onChanged();
19329              return this;
19330            }
19331            /**
19332             * <code>optional string owner = 2;</code>
19333             */
19334            public Builder clearOwner() {
19335              bitField0_ = (bitField0_ & ~0x00000002);
19336              owner_ = getDefaultInstance().getOwner();
19337              onChanged();
19338              return this;
19339            }
19340            /**
19341             * <code>optional string owner = 2;</code>
19342             */
19343            public Builder setOwnerBytes(
19344                com.google.protobuf.ByteString value) {
19345              if (value == null) {
19346        throw new NullPointerException();
19347      }
19348      bitField0_ |= 0x00000002;
19349              owner_ = value;
19350              onChanged();
19351              return this;
19352            }
19353    
        // optional string renewer = 3;
        // Same lazy String/ByteString caching scheme as owner_ (presence bit 0x4).
        private java.lang.Object renewer_ = "";
        /**
         * <code>optional string renewer = 3;</code>
         */
        public boolean hasRenewer() {
          return ((bitField0_ & 0x00000004) == 0x00000004);
        }
        /**
         * <code>optional string renewer = 3;</code>
         */
        // Decodes a cached ByteString as UTF-8 on first String access.
        public java.lang.String getRenewer() {
          java.lang.Object ref = renewer_;
          if (!(ref instanceof java.lang.String)) {
            java.lang.String s = ((com.google.protobuf.ByteString) ref)
                .toStringUtf8();
            renewer_ = s;
            return s;
          } else {
            return (java.lang.String) ref;
          }
        }
        /**
         * <code>optional string renewer = 3;</code>
         */
        // Encodes a cached String as UTF-8 bytes on first ByteString access.
        public com.google.protobuf.ByteString
            getRenewerBytes() {
          java.lang.Object ref = renewer_;
          if (ref instanceof String) {
            com.google.protobuf.ByteString b = 
                com.google.protobuf.ByteString.copyFromUtf8(
                    (java.lang.String) ref);
            renewer_ = b;
            return b;
          } else {
            return (com.google.protobuf.ByteString) ref;
          }
        }
        /**
         * <code>optional string renewer = 3;</code>
         */
        // Null-hostile setter (generator-produced indentation below).
        public Builder setRenewer(
            java.lang.String value) {
          if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000004;
          renewer_ = value;
          onChanged();
          return this;
        }
        /**
         * <code>optional string renewer = 3;</code>
         */
        public Builder clearRenewer() {
          bitField0_ = (bitField0_ & ~0x00000004);
          renewer_ = getDefaultInstance().getRenewer();
          onChanged();
          return this;
        }
        /**
         * <code>optional string renewer = 3;</code>
         */
        // Raw-bytes setter; stores the ByteString without UTF-8 validation.
        public Builder setRenewerBytes(
            com.google.protobuf.ByteString value) {
          if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000004;
          renewer_ = value;
          onChanged();
          return this;
        }
19427    
        // optional string realUser = 4;
        // Same lazy String/ByteString caching scheme as owner_ (presence bit 0x8).
        private java.lang.Object realUser_ = "";
        /**
         * <code>optional string realUser = 4;</code>
         */
        public boolean hasRealUser() {
          return ((bitField0_ & 0x00000008) == 0x00000008);
        }
        /**
         * <code>optional string realUser = 4;</code>
         */
        // Decodes a cached ByteString as UTF-8 on first String access.
        public java.lang.String getRealUser() {
          java.lang.Object ref = realUser_;
          if (!(ref instanceof java.lang.String)) {
            java.lang.String s = ((com.google.protobuf.ByteString) ref)
                .toStringUtf8();
            realUser_ = s;
            return s;
          } else {
            return (java.lang.String) ref;
          }
        }
        /**
         * <code>optional string realUser = 4;</code>
         */
        // Encodes a cached String as UTF-8 bytes on first ByteString access.
        public com.google.protobuf.ByteString
            getRealUserBytes() {
          java.lang.Object ref = realUser_;
          if (ref instanceof String) {
            com.google.protobuf.ByteString b = 
                com.google.protobuf.ByteString.copyFromUtf8(
                    (java.lang.String) ref);
            realUser_ = b;
            return b;
          } else {
            return (com.google.protobuf.ByteString) ref;
          }
        }
        /**
         * <code>optional string realUser = 4;</code>
         */
        // Null-hostile setter (generator-produced indentation below).
        public Builder setRealUser(
            java.lang.String value) {
          if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000008;
          realUser_ = value;
          onChanged();
          return this;
        }
        /**
         * <code>optional string realUser = 4;</code>
         */
        public Builder clearRealUser() {
          bitField0_ = (bitField0_ & ~0x00000008);
          realUser_ = getDefaultInstance().getRealUser();
          onChanged();
          return this;
        }
        /**
         * <code>optional string realUser = 4;</code>
         */
        // Raw-bytes setter; stores the ByteString without UTF-8 validation.
        public Builder setRealUserBytes(
            com.google.protobuf.ByteString value) {
          if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000008;
          realUser_ = value;
          onChanged();
          return this;
        }
19501    
        // optional uint64 issueDate = 5;
        // uint64 on the wire, carried as a Java long (presence bit 0x10).
        private long issueDate_ ;
        /**
         * <code>optional uint64 issueDate = 5;</code>
         */
        public boolean hasIssueDate() {
          return ((bitField0_ & 0x00000010) == 0x00000010);
        }
        /**
         * <code>optional uint64 issueDate = 5;</code>
         */
        public long getIssueDate() {
          return issueDate_;
        }
        /**
         * <code>optional uint64 issueDate = 5;</code>
         */
        public Builder setIssueDate(long value) {
          bitField0_ |= 0x00000010;
          issueDate_ = value;
          onChanged();
          return this;
        }
        /**
         * <code>optional uint64 issueDate = 5;</code>
         */
        // Clears the presence bit and restores the proto default (0).
        public Builder clearIssueDate() {
          bitField0_ = (bitField0_ & ~0x00000010);
          issueDate_ = 0L;
          onChanged();
          return this;
        }

        // optional uint64 maxDate = 6;
        // uint64 on the wire, carried as a Java long (presence bit 0x20).
        private long maxDate_ ;
        /**
         * <code>optional uint64 maxDate = 6;</code>
         */
        public boolean hasMaxDate() {
          return ((bitField0_ & 0x00000020) == 0x00000020);
        }
        /**
         * <code>optional uint64 maxDate = 6;</code>
         */
        public long getMaxDate() {
          return maxDate_;
        }
        /**
         * <code>optional uint64 maxDate = 6;</code>
         */
        public Builder setMaxDate(long value) {
          bitField0_ |= 0x00000020;
          maxDate_ = value;
          onChanged();
          return this;
        }
        /**
         * <code>optional uint64 maxDate = 6;</code>
         */
        public Builder clearMaxDate() {
          bitField0_ = (bitField0_ & ~0x00000020);
          maxDate_ = 0L;
          onChanged();
          return this;
        }
19567    
        // optional uint32 sequenceNumber = 7;
        // Presence bit 0x40.
        private int sequenceNumber_ ;
        /**
         * <code>optional uint32 sequenceNumber = 7;</code>
         */
        public boolean hasSequenceNumber() {
          return ((bitField0_ & 0x00000040) == 0x00000040);
        }
        /**
         * <code>optional uint32 sequenceNumber = 7;</code>
         */
        public int getSequenceNumber() {
          return sequenceNumber_;
        }
        /**
         * <code>optional uint32 sequenceNumber = 7;</code>
         */
        public Builder setSequenceNumber(int value) {
          bitField0_ |= 0x00000040;
          sequenceNumber_ = value;
          onChanged();
          return this;
        }
        /**
         * <code>optional uint32 sequenceNumber = 7;</code>
         */
        public Builder clearSequenceNumber() {
          bitField0_ = (bitField0_ & ~0x00000040);
          sequenceNumber_ = 0;
          onChanged();
          return this;
        }

        // optional uint32 masterKeyId = 8;
        // Presence bit 0x80.
        private int masterKeyId_ ;
        /**
         * <code>optional uint32 masterKeyId = 8;</code>
         */
        public boolean hasMasterKeyId() {
          return ((bitField0_ & 0x00000080) == 0x00000080);
        }
        /**
         * <code>optional uint32 masterKeyId = 8;</code>
         */
        public int getMasterKeyId() {
          return masterKeyId_;
        }
        /**
         * <code>optional uint32 masterKeyId = 8;</code>
         */
        public Builder setMasterKeyId(int value) {
          bitField0_ |= 0x00000080;
          masterKeyId_ = value;
          onChanged();
          return this;
        }
        /**
         * <code>optional uint32 masterKeyId = 8;</code>
         */
        public Builder clearMasterKeyId() {
          bitField0_ = (bitField0_ & ~0x00000080);
          masterKeyId_ = 0;
          onChanged();
          return this;
        }
19633    
        // optional uint64 expiryDate = 9;
        // uint64 on the wire, carried as a Java long (presence bit 0x100).
        private long expiryDate_ ;
        /**
         * <code>optional uint64 expiryDate = 9;</code>
         */
        public boolean hasExpiryDate() {
          return ((bitField0_ & 0x00000100) == 0x00000100);
        }
        /**
         * <code>optional uint64 expiryDate = 9;</code>
         */
        public long getExpiryDate() {
          return expiryDate_;
        }
        /**
         * <code>optional uint64 expiryDate = 9;</code>
         */
        public Builder setExpiryDate(long value) {
          bitField0_ |= 0x00000100;
          expiryDate_ = value;
          onChanged();
          return this;
        }
        /**
         * <code>optional uint64 expiryDate = 9;</code>
         */
        // Clears the presence bit and restores the proto default (0).
        public Builder clearExpiryDate() {
          bitField0_ = (bitField0_ & ~0x00000100);
          expiryDate_ = 0L;
          onChanged();
          return this;
        }
19666    
19667            // @@protoc_insertion_point(builder_scope:hadoop.hdfs.fsimage.SecretManagerSection.PersistToken)
19668          }
19669    
      // Eagerly builds the shared singleton default instance for PersistToken.
      static {
        defaultInstance = new PersistToken(true);
        defaultInstance.initFields();
      }
19674    
19675          // @@protoc_insertion_point(class_scope:hadoop.hdfs.fsimage.SecretManagerSection.PersistToken)
19676        }
19677    
    // Presence bitmap for the immutable message (0x1 = currentId,
    // 0x2 = tokenSequenceNumber, 0x4 = numKeys, 0x8 = numTokens).
    private int bitField0_;
    // optional uint32 currentId = 1;
    public static final int CURRENTID_FIELD_NUMBER = 1;
    private int currentId_;
    /**
     * <code>optional uint32 currentId = 1;</code>
     */
    public boolean hasCurrentId() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>optional uint32 currentId = 1;</code>
     */
    public int getCurrentId() {
      return currentId_;
    }

    // optional uint32 tokenSequenceNumber = 2;
    public static final int TOKENSEQUENCENUMBER_FIELD_NUMBER = 2;
    private int tokenSequenceNumber_;
    /**
     * <code>optional uint32 tokenSequenceNumber = 2;</code>
     */
    public boolean hasTokenSequenceNumber() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>optional uint32 tokenSequenceNumber = 2;</code>
     */
    public int getTokenSequenceNumber() {
      return tokenSequenceNumber_;
    }

    // optional uint32 numKeys = 3;
    public static final int NUMKEYS_FIELD_NUMBER = 3;
    private int numKeys_;
    /**
     * <code>optional uint32 numKeys = 3;</code>
     */
    public boolean hasNumKeys() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    /**
     * <code>optional uint32 numKeys = 3;</code>
     */
    public int getNumKeys() {
      return numKeys_;
    }

    // optional uint32 numTokens = 4;
    public static final int NUMTOKENS_FIELD_NUMBER = 4;
    private int numTokens_;
    /**
     * <code>optional uint32 numTokens = 4;</code>
     *
     * <pre>
     * repeated DelegationKey keys
     * repeated PersistToken tokens
     * </pre>
     */
    public boolean hasNumTokens() {
      return ((bitField0_ & 0x00000008) == 0x00000008);
    }
    /**
     * <code>optional uint32 numTokens = 4;</code>
     *
     * <pre>
     * repeated DelegationKey keys
     * repeated PersistToken tokens
     * </pre>
     */
    public int getNumTokens() {
      return numTokens_;
    }
19752    
    // Sets every field to its proto default value for the default instance.
    private void initFields() {
      currentId_ = 0;
      tokenSequenceNumber_ = 0;
      numKeys_ = 0;
      numTokens_ = 0;
    }
    // Memoized result: -1 = unknown, 0 = not initialized, 1 = initialized.
    private byte memoizedIsInitialized = -1;
    // Always true: all fields of this message are optional, so any instance is valid.
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      memoizedIsInitialized = 1;
      return true;
    }
19767    
    // Serializes only the fields whose presence bits are set, in field-number
    // order, followed by any unknown fields preserved from parsing.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeUInt32(1, currentId_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeUInt32(2, tokenSequenceNumber_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeUInt32(3, numKeys_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        output.writeUInt32(4, numTokens_);
      }
      getUnknownFields().writeTo(output);
    }

    // Memoized wire size of this message; -1 until first computed.
    private int memoizedSerializedSize = -1;
    // Computes (once) the exact number of bytes writeTo() will emit.
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt32Size(1, currentId_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt32Size(2, tokenSequenceNumber_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt32Size(3, numKeys_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt32Size(4, numTokens_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
19812    
    private static final long serialVersionUID = 0L;
    // Java serialization hook; delegates to the superclass's serialized proxy.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
19819    
    // Static parse entry points: thin wrappers that delegate every input form
    // (ByteString, byte[], InputStream, delimited stream, CodedInputStream)
    // to the shared PARSER, with optional extension-registry variants.
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    // Delimited variants read a varint length prefix before the message body.
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
19872    
    // Builder factory methods: fresh builder, builder pre-populated from a
    // prototype, and a builder converted back from this instance.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    // Framework hook: creates a builder attached to a parent for change notification.
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
19886        /**
19887         * Protobuf type {@code hadoop.hdfs.fsimage.SecretManagerSection}
19888         */
19889        public static final class Builder extends
19890            com.google.protobuf.GeneratedMessage.Builder<Builder>
19891           implements org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSectionOrBuilder {
      // Descriptor plumbing: ties this builder to the SecretManagerSection
      // schema generated from fsimage.proto.
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SecretManagerSection_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SecretManagerSection_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.Builder.class);
      }

      // Construct using org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // No-op here: this message has no sub-message fields needing eager field builders.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }
19921    
      // Resets every field to its proto default and clears all presence bits.
      public Builder clear() {
        super.clear();
        currentId_ = 0;
        bitField0_ = (bitField0_ & ~0x00000001);
        tokenSequenceNumber_ = 0;
        bitField0_ = (bitField0_ & ~0x00000002);
        numKeys_ = 0;
        bitField0_ = (bitField0_ & ~0x00000004);
        numTokens_ = 0;
        bitField0_ = (bitField0_ & ~0x00000008);
        return this;
      }

      // Deep copy via an intermediate partial message.
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
19938    
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SecretManagerSection_descriptor;
      }

      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection getDefaultInstanceForType() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.getDefaultInstance();
      }

      // Builds the message, throwing if required fields are missing
      // (never here, since all fields of this message are optional).
      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection build() {
        org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
19955    
      // Copies the builder's fields into a new immutable message, translating
      // the builder's presence bits into the message's bitField0_.
      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection buildPartial() {
        org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection result = new org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.currentId_ = currentId_;
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.tokenSequenceNumber_ = tokenSequenceNumber_;
        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
          to_bitField0_ |= 0x00000004;
        }
        result.numKeys_ = numKeys_;
        if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
          to_bitField0_ |= 0x00000008;
        }
        result.numTokens_ = numTokens_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
19980    
      // Dynamic-dispatch merge: uses the typed overload when possible,
      // otherwise falls back to reflection-based merging in the superclass.
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection) {
          return mergeFrom((org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      // Field-by-field merge: only fields present in `other` overwrite this
      // builder; unknown fields are merged too. Default instance is a no-op.
      public Builder mergeFrom(org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection other) {
        if (other == org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.getDefaultInstance()) return this;
        if (other.hasCurrentId()) {
          setCurrentId(other.getCurrentId());
        }
        if (other.hasTokenSequenceNumber()) {
          setTokenSequenceNumber(other.getTokenSequenceNumber());
        }
        if (other.hasNumKeys()) {
          setNumKeys(other.getNumKeys());
        }
        if (other.hasNumTokens()) {
          setNumTokens(other.getNumTokens());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      // All fields optional, so the builder is always in a buildable state.
      public final boolean isInitialized() {
        return true;
      }
20011    
      // Parses from a coded stream and merges into this builder. On a parse
      // failure, any partially parsed message is still merged in (via the
      // finally block) before the exception propagates.
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      // Presence bitmap for this builder (0x1 = currentId, 0x2 = tokenSequenceNumber,
      // 0x4 = numKeys, 0x8 = numTokens).
      private int bitField0_;
20030    
      // optional uint32 currentId = 1;
      private int currentId_ ;
      /**
       * <code>optional uint32 currentId = 1;</code>
       */
      public boolean hasCurrentId() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>optional uint32 currentId = 1;</code>
       */
      public int getCurrentId() {
        return currentId_;
      }
      /**
       * <code>optional uint32 currentId = 1;</code>
       */
      // Records the value, marks the presence bit, and notifies any parent builder.
      public Builder setCurrentId(int value) {
        bitField0_ |= 0x00000001;
        currentId_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional uint32 currentId = 1;</code>
       */
      // Clears the presence bit and restores the proto default (0).
      public Builder clearCurrentId() {
        bitField0_ = (bitField0_ & ~0x00000001);
        currentId_ = 0;
        onChanged();
        return this;
      }
20063    
          // optional uint32 tokenSequenceNumber = 2;
          // Backing field plus presence-tracked accessors (presence bit 1
          // of bitField0_).
          private int tokenSequenceNumber_ ;
          /**
           * <code>optional uint32 tokenSequenceNumber = 2;</code>
           *
           * @return whether tokenSequenceNumber has been explicitly set
           */
          public boolean hasTokenSequenceNumber() {
            return ((bitField0_ & 0x00000002) == 0x00000002);
          }
          /**
           * <code>optional uint32 tokenSequenceNumber = 2;</code>
           */
          public int getTokenSequenceNumber() {
            return tokenSequenceNumber_;
          }
          /**
           * <code>optional uint32 tokenSequenceNumber = 2;</code>
           *
           * <p>Sets the value and marks the field as present.</p>
           */
          public Builder setTokenSequenceNumber(int value) {
            bitField0_ |= 0x00000002;
            tokenSequenceNumber_ = value;
            onChanged();
            return this;
          }
          /**
           * <code>optional uint32 tokenSequenceNumber = 2;</code>
           *
           * <p>Clears the presence bit and restores the proto default (0).</p>
           */
          public Builder clearTokenSequenceNumber() {
            bitField0_ = (bitField0_ & ~0x00000002);
            tokenSequenceNumber_ = 0;
            onChanged();
            return this;
          }
20096    
          // optional uint32 numKeys = 3;
          // Backing field plus presence-tracked accessors (presence bit 2
          // of bitField0_).
          private int numKeys_ ;
          /**
           * <code>optional uint32 numKeys = 3;</code>
           *
           * @return whether numKeys has been explicitly set
           */
          public boolean hasNumKeys() {
            return ((bitField0_ & 0x00000004) == 0x00000004);
          }
          /**
           * <code>optional uint32 numKeys = 3;</code>
           */
          public int getNumKeys() {
            return numKeys_;
          }
          /**
           * <code>optional uint32 numKeys = 3;</code>
           *
           * <p>Sets the value and marks the field as present.</p>
           */
          public Builder setNumKeys(int value) {
            bitField0_ |= 0x00000004;
            numKeys_ = value;
            onChanged();
            return this;
          }
          /**
           * <code>optional uint32 numKeys = 3;</code>
           *
           * <p>Clears the presence bit and restores the proto default (0).</p>
           */
          public Builder clearNumKeys() {
            bitField0_ = (bitField0_ & ~0x00000004);
            numKeys_ = 0;
            onChanged();
            return this;
          }
20129    
          // optional uint32 numTokens = 4;
          // Backing field plus presence-tracked accessors (presence bit 3
          // of bitField0_). Per the .proto comments, the DelegationKey and
          // PersistToken records themselves are serialized after this
          // message rather than as fields of it.
          private int numTokens_ ;
          /**
           * <code>optional uint32 numTokens = 4;</code>
           *
           * <pre>
           * repeated DelegationKey keys
           * repeated PersistToken tokens
           * </pre>
           */
          public boolean hasNumTokens() {
            return ((bitField0_ & 0x00000008) == 0x00000008);
          }
          /**
           * <code>optional uint32 numTokens = 4;</code>
           *
           * <pre>
           * repeated DelegationKey keys
           * repeated PersistToken tokens
           * </pre>
           */
          public int getNumTokens() {
            return numTokens_;
          }
          /**
           * <code>optional uint32 numTokens = 4;</code>
           *
           * <pre>
           * repeated DelegationKey keys
           * repeated PersistToken tokens
           * </pre>
           */
          public Builder setNumTokens(int value) {
            bitField0_ |= 0x00000008;
            numTokens_ = value;
            onChanged();
            return this;
          }
          /**
           * <code>optional uint32 numTokens = 4;</code>
           *
           * <pre>
           * repeated DelegationKey keys
           * repeated PersistToken tokens
           * </pre>
           */
          public Builder clearNumTokens() {
            bitField0_ = (bitField0_ & ~0x00000008);
            numTokens_ = 0;
            onChanged();
            return this;
          }
20182    
20183          // @@protoc_insertion_point(builder_scope:hadoop.hdfs.fsimage.SecretManagerSection)
20184        }
20185    
    // Eagerly build the shared default (empty) instance returned by
    // getDefaultInstance(); initFields() resets every field to its
    // proto default.
    static {
      defaultInstance = new SecretManagerSection(true);
      defaultInstance.initFields();
    }
20190    
20191        // @@protoc_insertion_point(class_scope:hadoop.hdfs.fsimage.SecretManagerSection)
20192      }
20193    
  /**
   * Accessor interface implemented by {@code CacheManagerSection} and its
   * Builder: presence checks and getters for the three required header
   * fields of the cache manager section.
   */
  public interface CacheManagerSectionOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required uint64 nextDirectiveId = 1;
    /**
     * <code>required uint64 nextDirectiveId = 1;</code>
     */
    boolean hasNextDirectiveId();
    /**
     * <code>required uint64 nextDirectiveId = 1;</code>
     */
    long getNextDirectiveId();

    // required uint32 numPools = 2;
    /**
     * <code>required uint32 numPools = 2;</code>
     */
    boolean hasNumPools();
    /**
     * <code>required uint32 numPools = 2;</code>
     */
    int getNumPools();

    // required uint32 numDirectives = 3;
    /**
     * <code>required uint32 numDirectives = 3;</code>
     *
     * <pre>
     * repeated CachePoolInfoProto pools
     * repeated CacheDirectiveInfoProto directives
     * </pre>
     */
    boolean hasNumDirectives();
    /**
     * <code>required uint32 numDirectives = 3;</code>
     *
     * <pre>
     * repeated CachePoolInfoProto pools
     * repeated CacheDirectiveInfoProto directives
     * </pre>
     */
    int getNumDirectives();
  }
20237      /**
20238       * Protobuf type {@code hadoop.hdfs.fsimage.CacheManagerSection}
20239       */
20240      public static final class CacheManagerSection extends
20241          com.google.protobuf.GeneratedMessage
20242          implements CacheManagerSectionOrBuilder {
20243        // Use CacheManagerSection.newBuilder() to construct.
20244        private CacheManagerSection(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
20245          super(builder);
20246          this.unknownFields = builder.getUnknownFields();
20247        }
20248        private CacheManagerSection(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
20249    
20250        private static final CacheManagerSection defaultInstance;
20251        public static CacheManagerSection getDefaultInstance() {
20252          return defaultInstance;
20253        }
20254    
20255        public CacheManagerSection getDefaultInstanceForType() {
20256          return defaultInstance;
20257        }
20258    
20259        private final com.google.protobuf.UnknownFieldSet unknownFields;
20260        @java.lang.Override
20261        public final com.google.protobuf.UnknownFieldSet
20262            getUnknownFields() {
20263          return this.unknownFields;
20264        }
20265        private CacheManagerSection(
20266            com.google.protobuf.CodedInputStream input,
20267            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
20268            throws com.google.protobuf.InvalidProtocolBufferException {
20269          initFields();
20270          int mutable_bitField0_ = 0;
20271          com.google.protobuf.UnknownFieldSet.Builder unknownFields =
20272              com.google.protobuf.UnknownFieldSet.newBuilder();
20273          try {
20274            boolean done = false;
20275            while (!done) {
20276              int tag = input.readTag();
20277              switch (tag) {
20278                case 0:
20279                  done = true;
20280                  break;
20281                default: {
20282                  if (!parseUnknownField(input, unknownFields,
20283                                         extensionRegistry, tag)) {
20284                    done = true;
20285                  }
20286                  break;
20287                }
20288                case 8: {
20289                  bitField0_ |= 0x00000001;
20290                  nextDirectiveId_ = input.readUInt64();
20291                  break;
20292                }
20293                case 16: {
20294                  bitField0_ |= 0x00000002;
20295                  numPools_ = input.readUInt32();
20296                  break;
20297                }
20298                case 24: {
20299                  bitField0_ |= 0x00000004;
20300                  numDirectives_ = input.readUInt32();
20301                  break;
20302                }
20303              }
20304            }
20305          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
20306            throw e.setUnfinishedMessage(this);
20307          } catch (java.io.IOException e) {
20308            throw new com.google.protobuf.InvalidProtocolBufferException(
20309                e.getMessage()).setUnfinishedMessage(this);
20310          } finally {
20311            this.unknownFields = unknownFields.build();
20312            makeExtensionsImmutable();
20313          }
20314        }
20315        public static final com.google.protobuf.Descriptors.Descriptor
20316            getDescriptor() {
20317          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_CacheManagerSection_descriptor;
20318        }
20319    
20320        protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
20321            internalGetFieldAccessorTable() {
20322          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_CacheManagerSection_fieldAccessorTable
20323              .ensureFieldAccessorsInitialized(
20324                  org.apache.hadoop.hdfs.server.namenode.FsImageProto.CacheManagerSection.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.CacheManagerSection.Builder.class);
20325        }
20326    
20327        public static com.google.protobuf.Parser<CacheManagerSection> PARSER =
20328            new com.google.protobuf.AbstractParser<CacheManagerSection>() {
20329          public CacheManagerSection parsePartialFrom(
20330              com.google.protobuf.CodedInputStream input,
20331              com.google.protobuf.ExtensionRegistryLite extensionRegistry)
20332              throws com.google.protobuf.InvalidProtocolBufferException {
20333            return new CacheManagerSection(input, extensionRegistry);
20334          }
20335        };
20336    
20337        @java.lang.Override
20338        public com.google.protobuf.Parser<CacheManagerSection> getParserForType() {
20339          return PARSER;
20340        }
20341    
20342        private int bitField0_;
20343        // required uint64 nextDirectiveId = 1;
20344        public static final int NEXTDIRECTIVEID_FIELD_NUMBER = 1;
20345        private long nextDirectiveId_;
20346        /**
20347         * <code>required uint64 nextDirectiveId = 1;</code>
20348         */
20349        public boolean hasNextDirectiveId() {
20350          return ((bitField0_ & 0x00000001) == 0x00000001);
20351        }
20352        /**
20353         * <code>required uint64 nextDirectiveId = 1;</code>
20354         */
20355        public long getNextDirectiveId() {
20356          return nextDirectiveId_;
20357        }
20358    
20359        // required uint32 numPools = 2;
20360        public static final int NUMPOOLS_FIELD_NUMBER = 2;
20361        private int numPools_;
20362        /**
20363         * <code>required uint32 numPools = 2;</code>
20364         */
20365        public boolean hasNumPools() {
20366          return ((bitField0_ & 0x00000002) == 0x00000002);
20367        }
20368        /**
20369         * <code>required uint32 numPools = 2;</code>
20370         */
20371        public int getNumPools() {
20372          return numPools_;
20373        }
20374    
20375        // required uint32 numDirectives = 3;
20376        public static final int NUMDIRECTIVES_FIELD_NUMBER = 3;
20377        private int numDirectives_;
20378        /**
20379         * <code>required uint32 numDirectives = 3;</code>
20380         *
20381         * <pre>
20382         * repeated CachePoolInfoProto pools
20383         * repeated CacheDirectiveInfoProto directives
20384         * </pre>
20385         */
20386        public boolean hasNumDirectives() {
20387          return ((bitField0_ & 0x00000004) == 0x00000004);
20388        }
20389        /**
20390         * <code>required uint32 numDirectives = 3;</code>
20391         *
20392         * <pre>
20393         * repeated CachePoolInfoProto pools
20394         * repeated CacheDirectiveInfoProto directives
20395         * </pre>
20396         */
20397        public int getNumDirectives() {
20398          return numDirectives_;
20399        }
20400    
20401        private void initFields() {
20402          nextDirectiveId_ = 0L;
20403          numPools_ = 0;
20404          numDirectives_ = 0;
20405        }
20406        private byte memoizedIsInitialized = -1;
20407        public final boolean isInitialized() {
20408          byte isInitialized = memoizedIsInitialized;
20409          if (isInitialized != -1) return isInitialized == 1;
20410    
20411          if (!hasNextDirectiveId()) {
20412            memoizedIsInitialized = 0;
20413            return false;
20414          }
20415          if (!hasNumPools()) {
20416            memoizedIsInitialized = 0;
20417            return false;
20418          }
20419          if (!hasNumDirectives()) {
20420            memoizedIsInitialized = 0;
20421            return false;
20422          }
20423          memoizedIsInitialized = 1;
20424          return true;
20425        }
20426    
20427        public void writeTo(com.google.protobuf.CodedOutputStream output)
20428                            throws java.io.IOException {
20429          getSerializedSize();
20430          if (((bitField0_ & 0x00000001) == 0x00000001)) {
20431            output.writeUInt64(1, nextDirectiveId_);
20432          }
20433          if (((bitField0_ & 0x00000002) == 0x00000002)) {
20434            output.writeUInt32(2, numPools_);
20435          }
20436          if (((bitField0_ & 0x00000004) == 0x00000004)) {
20437            output.writeUInt32(3, numDirectives_);
20438          }
20439          getUnknownFields().writeTo(output);
20440        }
20441    
20442        private int memoizedSerializedSize = -1;
20443        public int getSerializedSize() {
20444          int size = memoizedSerializedSize;
20445          if (size != -1) return size;
20446    
20447          size = 0;
20448          if (((bitField0_ & 0x00000001) == 0x00000001)) {
20449            size += com.google.protobuf.CodedOutputStream
20450              .computeUInt64Size(1, nextDirectiveId_);
20451          }
20452          if (((bitField0_ & 0x00000002) == 0x00000002)) {
20453            size += com.google.protobuf.CodedOutputStream
20454              .computeUInt32Size(2, numPools_);
20455          }
20456          if (((bitField0_ & 0x00000004) == 0x00000004)) {
20457            size += com.google.protobuf.CodedOutputStream
20458              .computeUInt32Size(3, numDirectives_);
20459          }
20460          size += getUnknownFields().getSerializedSize();
20461          memoizedSerializedSize = size;
20462          return size;
20463        }
20464    
20465        private static final long serialVersionUID = 0L;
20466        @java.lang.Override
20467        protected java.lang.Object writeReplace()
20468            throws java.io.ObjectStreamException {
20469          return super.writeReplace();
20470        }
20471    
20472        public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.CacheManagerSection parseFrom(
20473            com.google.protobuf.ByteString data)
20474            throws com.google.protobuf.InvalidProtocolBufferException {
20475          return PARSER.parseFrom(data);
20476        }
20477        public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.CacheManagerSection parseFrom(
20478            com.google.protobuf.ByteString data,
20479            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
20480            throws com.google.protobuf.InvalidProtocolBufferException {
20481          return PARSER.parseFrom(data, extensionRegistry);
20482        }
20483        public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.CacheManagerSection parseFrom(byte[] data)
20484            throws com.google.protobuf.InvalidProtocolBufferException {
20485          return PARSER.parseFrom(data);
20486        }
20487        public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.CacheManagerSection parseFrom(
20488            byte[] data,
20489            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
20490            throws com.google.protobuf.InvalidProtocolBufferException {
20491          return PARSER.parseFrom(data, extensionRegistry);
20492        }
20493        public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.CacheManagerSection parseFrom(java.io.InputStream input)
20494            throws java.io.IOException {
20495          return PARSER.parseFrom(input);
20496        }
20497        public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.CacheManagerSection parseFrom(
20498            java.io.InputStream input,
20499            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
20500            throws java.io.IOException {
20501          return PARSER.parseFrom(input, extensionRegistry);
20502        }
20503        public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.CacheManagerSection parseDelimitedFrom(java.io.InputStream input)
20504            throws java.io.IOException {
20505          return PARSER.parseDelimitedFrom(input);
20506        }
20507        public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.CacheManagerSection parseDelimitedFrom(
20508            java.io.InputStream input,
20509            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
20510            throws java.io.IOException {
20511          return PARSER.parseDelimitedFrom(input, extensionRegistry);
20512        }
20513        public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.CacheManagerSection parseFrom(
20514            com.google.protobuf.CodedInputStream input)
20515            throws java.io.IOException {
20516          return PARSER.parseFrom(input);
20517        }
20518        public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.CacheManagerSection parseFrom(
20519            com.google.protobuf.CodedInputStream input,
20520            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
20521            throws java.io.IOException {
20522          return PARSER.parseFrom(input, extensionRegistry);
20523        }
20524    
20525        public static Builder newBuilder() { return Builder.create(); }
20526        public Builder newBuilderForType() { return newBuilder(); }
20527        public static Builder newBuilder(org.apache.hadoop.hdfs.server.namenode.FsImageProto.CacheManagerSection prototype) {
20528          return newBuilder().mergeFrom(prototype);
20529        }
20530        public Builder toBuilder() { return newBuilder(this); }
20531    
20532        @java.lang.Override
20533        protected Builder newBuilderForType(
20534            com.google.protobuf.GeneratedMessage.BuilderParent parent) {
20535          Builder builder = new Builder(parent);
20536          return builder;
20537        }
20538        /**
20539         * Protobuf type {@code hadoop.hdfs.fsimage.CacheManagerSection}
20540         */
20541        public static final class Builder extends
20542            com.google.protobuf.GeneratedMessage.Builder<Builder>
20543           implements org.apache.hadoop.hdfs.server.namenode.FsImageProto.CacheManagerSectionOrBuilder {
20544          public static final com.google.protobuf.Descriptors.Descriptor
20545              getDescriptor() {
20546            return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_CacheManagerSection_descriptor;
20547          }
20548    
20549          protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
20550              internalGetFieldAccessorTable() {
20551            return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_CacheManagerSection_fieldAccessorTable
20552                .ensureFieldAccessorsInitialized(
20553                    org.apache.hadoop.hdfs.server.namenode.FsImageProto.CacheManagerSection.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.CacheManagerSection.Builder.class);
20554          }
20555    
20556          // Construct using org.apache.hadoop.hdfs.server.namenode.FsImageProto.CacheManagerSection.newBuilder()
20557          private Builder() {
20558            maybeForceBuilderInitialization();
20559          }
20560    
20561          private Builder(
20562              com.google.protobuf.GeneratedMessage.BuilderParent parent) {
20563            super(parent);
20564            maybeForceBuilderInitialization();
20565          }
20566          private void maybeForceBuilderInitialization() {
20567            if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
20568            }
20569          }
20570          private static Builder create() {
20571            return new Builder();
20572          }
20573    
20574          public Builder clear() {
20575            super.clear();
20576            nextDirectiveId_ = 0L;
20577            bitField0_ = (bitField0_ & ~0x00000001);
20578            numPools_ = 0;
20579            bitField0_ = (bitField0_ & ~0x00000002);
20580            numDirectives_ = 0;
20581            bitField0_ = (bitField0_ & ~0x00000004);
20582            return this;
20583          }
20584    
20585          public Builder clone() {
20586            return create().mergeFrom(buildPartial());
20587          }
20588    
20589          public com.google.protobuf.Descriptors.Descriptor
20590              getDescriptorForType() {
20591            return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_CacheManagerSection_descriptor;
20592          }
20593    
20594          public org.apache.hadoop.hdfs.server.namenode.FsImageProto.CacheManagerSection getDefaultInstanceForType() {
20595            return org.apache.hadoop.hdfs.server.namenode.FsImageProto.CacheManagerSection.getDefaultInstance();
20596          }
20597    
20598          public org.apache.hadoop.hdfs.server.namenode.FsImageProto.CacheManagerSection build() {
20599            org.apache.hadoop.hdfs.server.namenode.FsImageProto.CacheManagerSection result = buildPartial();
20600            if (!result.isInitialized()) {
20601              throw newUninitializedMessageException(result);
20602            }
20603            return result;
20604          }
20605    
20606          public org.apache.hadoop.hdfs.server.namenode.FsImageProto.CacheManagerSection buildPartial() {
20607            org.apache.hadoop.hdfs.server.namenode.FsImageProto.CacheManagerSection result = new org.apache.hadoop.hdfs.server.namenode.FsImageProto.CacheManagerSection(this);
20608            int from_bitField0_ = bitField0_;
20609            int to_bitField0_ = 0;
20610            if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
20611              to_bitField0_ |= 0x00000001;
20612            }
20613            result.nextDirectiveId_ = nextDirectiveId_;
20614            if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
20615              to_bitField0_ |= 0x00000002;
20616            }
20617            result.numPools_ = numPools_;
20618            if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
20619              to_bitField0_ |= 0x00000004;
20620            }
20621            result.numDirectives_ = numDirectives_;
20622            result.bitField0_ = to_bitField0_;
20623            onBuilt();
20624            return result;
20625          }
20626    
20627          public Builder mergeFrom(com.google.protobuf.Message other) {
20628            if (other instanceof org.apache.hadoop.hdfs.server.namenode.FsImageProto.CacheManagerSection) {
20629              return mergeFrom((org.apache.hadoop.hdfs.server.namenode.FsImageProto.CacheManagerSection)other);
20630            } else {
20631              super.mergeFrom(other);
20632              return this;
20633            }
20634          }
20635    
20636          public Builder mergeFrom(org.apache.hadoop.hdfs.server.namenode.FsImageProto.CacheManagerSection other) {
20637            if (other == org.apache.hadoop.hdfs.server.namenode.FsImageProto.CacheManagerSection.getDefaultInstance()) return this;
20638            if (other.hasNextDirectiveId()) {
20639              setNextDirectiveId(other.getNextDirectiveId());
20640            }
20641            if (other.hasNumPools()) {
20642              setNumPools(other.getNumPools());
20643            }
20644            if (other.hasNumDirectives()) {
20645              setNumDirectives(other.getNumDirectives());
20646            }
20647            this.mergeUnknownFields(other.getUnknownFields());
20648            return this;
20649          }
20650    
20651          public final boolean isInitialized() {
20652            if (!hasNextDirectiveId()) {
20653              
20654              return false;
20655            }
20656            if (!hasNumPools()) {
20657              
20658              return false;
20659            }
20660            if (!hasNumDirectives()) {
20661              
20662              return false;
20663            }
20664            return true;
20665          }
20666    
20667          public Builder mergeFrom(
20668              com.google.protobuf.CodedInputStream input,
20669              com.google.protobuf.ExtensionRegistryLite extensionRegistry)
20670              throws java.io.IOException {
20671            org.apache.hadoop.hdfs.server.namenode.FsImageProto.CacheManagerSection parsedMessage = null;
20672            try {
20673              parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
20674            } catch (com.google.protobuf.InvalidProtocolBufferException e) {
20675              parsedMessage = (org.apache.hadoop.hdfs.server.namenode.FsImageProto.CacheManagerSection) e.getUnfinishedMessage();
20676              throw e;
20677            } finally {
20678              if (parsedMessage != null) {
20679                mergeFrom(parsedMessage);
20680              }
20681            }
20682            return this;
20683          }
20684          private int bitField0_;
20685    
20686          // required uint64 nextDirectiveId = 1;
20687          private long nextDirectiveId_ ;
20688          /**
20689           * <code>required uint64 nextDirectiveId = 1;</code>
20690           */
20691          public boolean hasNextDirectiveId() {
20692            return ((bitField0_ & 0x00000001) == 0x00000001);
20693          }
20694          /**
20695           * <code>required uint64 nextDirectiveId = 1;</code>
20696           */
20697          public long getNextDirectiveId() {
20698            return nextDirectiveId_;
20699          }
20700          /**
20701           * <code>required uint64 nextDirectiveId = 1;</code>
20702           */
20703          public Builder setNextDirectiveId(long value) {
20704            bitField0_ |= 0x00000001;
20705            nextDirectiveId_ = value;
20706            onChanged();
20707            return this;
20708          }
20709          /**
20710           * <code>required uint64 nextDirectiveId = 1;</code>
20711           */
20712          public Builder clearNextDirectiveId() {
20713            bitField0_ = (bitField0_ & ~0x00000001);
20714            nextDirectiveId_ = 0L;
20715            onChanged();
20716            return this;
20717          }
20718    
20719          // required uint32 numPools = 2;
20720          private int numPools_ ;
20721          /**
20722           * <code>required uint32 numPools = 2;</code>
20723           */
20724          public boolean hasNumPools() {
20725            return ((bitField0_ & 0x00000002) == 0x00000002);
20726          }
20727          /**
20728           * <code>required uint32 numPools = 2;</code>
20729           */
20730          public int getNumPools() {
20731            return numPools_;
20732          }
20733          /**
20734           * <code>required uint32 numPools = 2;</code>
20735           */
20736          public Builder setNumPools(int value) {
20737            bitField0_ |= 0x00000002;
20738            numPools_ = value;
20739            onChanged();
20740            return this;
20741          }
20742          /**
20743           * <code>required uint32 numPools = 2;</code>
20744           */
20745          public Builder clearNumPools() {
20746            bitField0_ = (bitField0_ & ~0x00000002);
20747            numPools_ = 0;
20748            onChanged();
20749            return this;
20750          }
20751    
20752          // required uint32 numDirectives = 3;
20753          private int numDirectives_ ;
20754          /**
20755           * <code>required uint32 numDirectives = 3;</code>
20756           *
20757           * <pre>
20758           * repeated CachePoolInfoProto pools
20759           * repeated CacheDirectiveInfoProto directives
20760           * </pre>
20761           */
20762          public boolean hasNumDirectives() {
20763            return ((bitField0_ & 0x00000004) == 0x00000004);
20764          }
20765          /**
20766           * <code>required uint32 numDirectives = 3;</code>
20767           *
20768           * <pre>
20769           * repeated CachePoolInfoProto pools
20770           * repeated CacheDirectiveInfoProto directives
20771           * </pre>
20772           */
20773          public int getNumDirectives() {
20774            return numDirectives_;
20775          }
20776          /**
20777           * <code>required uint32 numDirectives = 3;</code>
20778           *
20779           * <pre>
20780           * repeated CachePoolInfoProto pools
20781           * repeated CacheDirectiveInfoProto directives
20782           * </pre>
20783           */
20784          public Builder setNumDirectives(int value) {
20785            bitField0_ |= 0x00000004;
20786            numDirectives_ = value;
20787            onChanged();
20788            return this;
20789          }
20790          /**
20791           * <code>required uint32 numDirectives = 3;</code>
20792           *
20793           * <pre>
20794           * repeated CachePoolInfoProto pools
20795           * repeated CacheDirectiveInfoProto directives
20796           * </pre>
20797           */
20798          public Builder clearNumDirectives() {
20799            bitField0_ = (bitField0_ & ~0x00000004);
20800            numDirectives_ = 0;
20801            onChanged();
20802            return this;
20803          }
20804    
20805          // @@protoc_insertion_point(builder_scope:hadoop.hdfs.fsimage.CacheManagerSection)
20806        }
20807    
    static {
      // Eagerly construct the shared default instance for this message type
      // (the boolean flag selects the "no unknown fields" constructor path —
      // NOTE(review): constructor not visible here, confirm against its def).
      defaultInstance = new CacheManagerSection(true);
      defaultInstance.initFields();
    }
20812    
20813        // @@protoc_insertion_point(class_scope:hadoop.hdfs.fsimage.CacheManagerSection)
20814      }
20815    
  // Descriptor / FieldAccessorTable pairs for every message type declared in
  // fsimage.proto. All of them are assigned exactly once, by the static
  // initializer that parses the serialized file descriptor below.
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_hdfs_fsimage_FileSummary_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_hdfs_fsimage_FileSummary_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_hdfs_fsimage_FileSummary_Section_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_hdfs_fsimage_FileSummary_Section_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_hdfs_fsimage_NameSystemSection_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_hdfs_fsimage_NameSystemSection_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_hdfs_fsimage_INodeSection_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_hdfs_fsimage_INodeSection_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_hdfs_fsimage_INodeSection_FileUnderConstructionFeature_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_hdfs_fsimage_INodeSection_FileUnderConstructionFeature_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_hdfs_fsimage_INodeSection_AclFeatureProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_hdfs_fsimage_INodeSection_AclFeatureProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_hdfs_fsimage_INodeSection_INodeFile_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_hdfs_fsimage_INodeSection_INodeFile_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_hdfs_fsimage_INodeSection_INodeDirectory_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_hdfs_fsimage_INodeSection_INodeDirectory_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_hdfs_fsimage_INodeSection_INodeSymlink_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_hdfs_fsimage_INodeSection_INodeSymlink_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_hdfs_fsimage_INodeSection_INode_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_hdfs_fsimage_INodeSection_INode_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_hdfs_fsimage_FilesUnderConstructionSection_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_hdfs_fsimage_FilesUnderConstructionSection_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_hdfs_fsimage_FilesUnderConstructionSection_FileUnderConstructionEntry_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_hdfs_fsimage_FilesUnderConstructionSection_FileUnderConstructionEntry_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_hdfs_fsimage_INodeDirectorySection_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_hdfs_fsimage_INodeDirectorySection_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_hdfs_fsimage_INodeDirectorySection_DirEntry_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_hdfs_fsimage_INodeDirectorySection_DirEntry_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_hdfs_fsimage_INodeReferenceSection_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_hdfs_fsimage_INodeReferenceSection_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_hdfs_fsimage_INodeReferenceSection_INodeReference_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_hdfs_fsimage_INodeReferenceSection_INodeReference_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_hdfs_fsimage_SnapshotSection_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_hdfs_fsimage_SnapshotSection_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_hdfs_fsimage_SnapshotSection_Snapshot_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_hdfs_fsimage_SnapshotSection_Snapshot_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_CreatedListEntry_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_CreatedListEntry_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_DirectoryDiff_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_DirectoryDiff_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_FileDiff_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_FileDiff_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_DiffEntry_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_DiffEntry_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_hdfs_fsimage_StringTableSection_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_hdfs_fsimage_StringTableSection_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_hdfs_fsimage_StringTableSection_Entry_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_hdfs_fsimage_StringTableSection_Entry_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_hdfs_fsimage_SecretManagerSection_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_hdfs_fsimage_SecretManagerSection_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_hdfs_fsimage_SecretManagerSection_DelegationKey_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_hdfs_fsimage_SecretManagerSection_DelegationKey_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_hdfs_fsimage_SecretManagerSection_PersistToken_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_hdfs_fsimage_SecretManagerSection_PersistToken_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_hdfs_fsimage_CacheManagerSection_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_hdfs_fsimage_CacheManagerSection_fieldAccessorTable;
20961    
  /**
   * Returns the {@link com.google.protobuf.Descriptors.FileDescriptor}
   * for {@code fsimage.proto}, built once by the static initializer that
   * parses the embedded serialized descriptor.
   */
  public static com.google.protobuf.Descriptors.FileDescriptor
      getDescriptor() {
    return descriptor;
  }
  // Assigned exactly once inside the static descriptor-parsing block.
  private static com.google.protobuf.Descriptors.FileDescriptor
      descriptor;
20968      static {
20969        java.lang.String[] descriptorData = {
20970          "\n\rfsimage.proto\022\023hadoop.hdfs.fsimage\032\nhd" +
20971          "fs.proto\032\tacl.proto\"\277\001\n\013FileSummary\022\025\n\ro" +
20972          "ndiskVersion\030\001 \002(\r\022\025\n\rlayoutVersion\030\002 \002(" +
20973          "\r\022\r\n\005codec\030\003 \001(\t\022:\n\010sections\030\004 \003(\0132(.had" +
20974          "oop.hdfs.fsimage.FileSummary.Section\0327\n\007" +
20975          "Section\022\014\n\004name\030\001 \001(\t\022\016\n\006length\030\002 \001(\004\022\016\n" +
20976          "\006offset\030\003 \001(\004\"\277\001\n\021NameSystemSection\022\023\n\013n" +
20977          "amespaceId\030\001 \001(\r\022\022\n\ngenstampV1\030\002 \001(\004\022\022\n\n" +
20978          "genstampV2\030\003 \001(\004\022\027\n\017genstampV1Limit\030\004 \001(" +
20979          "\004\022\034\n\024lastAllocatedBlockId\030\005 \001(\004\022\025\n\rtrans",
20980          "actionId\030\006 \001(\004\022\037\n\027rollingUpgradeStartTim" +
20981          "e\030\007 \001(\004\"\267\010\n\014INodeSection\022\023\n\013lastInodeId\030" +
20982          "\001 \001(\004\022\021\n\tnumInodes\030\002 \001(\004\032I\n\034FileUnderCon" +
20983          "structionFeature\022\022\n\nclientName\030\001 \001(\t\022\025\n\r" +
20984          "clientMachine\030\002 \001(\t\032&\n\017AclFeatureProto\022\023" +
20985          "\n\007entries\030\002 \003(\007B\002\020\001\032\267\002\n\tINodeFile\022\023\n\013rep" +
20986          "lication\030\001 \001(\r\022\030\n\020modificationTime\030\002 \001(\004" +
20987          "\022\022\n\naccessTime\030\003 \001(\004\022\032\n\022preferredBlockSi" +
20988          "ze\030\004 \001(\004\022\022\n\npermission\030\005 \001(\006\022\'\n\006blocks\030\006" +
20989          " \003(\0132\027.hadoop.hdfs.BlockProto\022N\n\006fileUC\030",
20990          "\007 \001(\0132>.hadoop.hdfs.fsimage.INodeSection" +
20991          ".FileUnderConstructionFeature\022>\n\003acl\030\010 \001" +
20992          "(\01321.hadoop.hdfs.fsimage.INodeSection.Ac" +
20993          "lFeatureProto\032\240\001\n\016INodeDirectory\022\030\n\020modi" +
20994          "ficationTime\030\001 \001(\004\022\017\n\007nsQuota\030\002 \001(\004\022\017\n\007d" +
20995          "sQuota\030\003 \001(\004\022\022\n\npermission\030\004 \001(\006\022>\n\003acl\030" +
20996          "\005 \001(\01321.hadoop.hdfs.fsimage.INodeSection" +
20997          ".AclFeatureProto\032`\n\014INodeSymlink\022\022\n\nperm" +
20998          "ission\030\001 \001(\006\022\016\n\006target\030\002 \001(\014\022\030\n\020modifica" +
20999          "tionTime\030\003 \001(\004\022\022\n\naccessTime\030\004 \001(\004\032\314\002\n\005I",
21000          "Node\022:\n\004type\030\001 \002(\0162,.hadoop.hdfs.fsimage" +
21001          ".INodeSection.INode.Type\022\n\n\002id\030\002 \002(\004\022\014\n\004" +
21002          "name\030\003 \001(\014\0229\n\004file\030\004 \001(\0132+.hadoop.hdfs.f" +
21003          "simage.INodeSection.INodeFile\022C\n\tdirecto" +
21004          "ry\030\005 \001(\01320.hadoop.hdfs.fsimage.INodeSect" +
21005          "ion.INodeDirectory\022?\n\007symlink\030\006 \001(\0132..ha" +
21006          "doop.hdfs.fsimage.INodeSection.INodeSyml" +
21007          "ink\",\n\004Type\022\010\n\004FILE\020\001\022\r\n\tDIRECTORY\020\002\022\013\n\007" +
21008          "SYMLINK\020\003\"`\n\035FilesUnderConstructionSecti" +
21009          "on\032?\n\032FileUnderConstructionEntry\022\017\n\007inod",
21010          "eId\030\001 \001(\004\022\020\n\010fullPath\030\002 \001(\t\"b\n\025INodeDire" +
21011          "ctorySection\032I\n\010DirEntry\022\016\n\006parent\030\001 \001(\004" +
21012          "\022\024\n\010children\030\002 \003(\004B\002\020\001\022\027\n\013refChildren\030\003 " +
21013          "\003(\rB\002\020\001\"z\n\025INodeReferenceSection\032a\n\016INod" +
21014          "eReference\022\022\n\nreferredId\030\001 \001(\004\022\014\n\004name\030\002" +
21015          " \001(\014\022\025\n\rdstSnapshotId\030\003 \001(\r\022\026\n\016lastSnaps" +
21016          "hotId\030\004 \001(\r\"\265\001\n\017SnapshotSection\022\027\n\017snaps" +
21017          "hotCounter\030\001 \001(\r\022\034\n\020snapshottableDir\030\002 \003" +
21018          "(\004B\002\020\001\022\024\n\014numSnapshots\030\003 \001(\r\032U\n\010Snapshot" +
21019          "\022\022\n\nsnapshotId\030\001 \001(\r\0225\n\004root\030\002 \001(\0132\'.had",
21020          "oop.hdfs.fsimage.INodeSection.INode\"\327\004\n\023" +
21021          "SnapshotDiffSection\032 \n\020CreatedListEntry\022" +
21022          "\014\n\004name\030\001 \001(\014\032\367\001\n\rDirectoryDiff\022\022\n\nsnaps" +
21023          "hotId\030\001 \001(\r\022\024\n\014childrenSize\030\002 \001(\r\022\026\n\016isS" +
21024          "napshotRoot\030\003 \001(\010\022\014\n\004name\030\004 \001(\014\022F\n\014snaps" +
21025          "hotCopy\030\005 \001(\01320.hadoop.hdfs.fsimage.INod" +
21026          "eSection.INodeDirectory\022\027\n\017createdListSi" +
21027          "ze\030\006 \001(\r\022\030\n\014deletedINode\030\007 \003(\004B\002\020\001\022\033\n\017de" +
21028          "letedINodeRef\030\010 \003(\rB\002\020\001\032\201\001\n\010FileDiff\022\022\n\n" +
21029          "snapshotId\030\001 \001(\r\022\020\n\010fileSize\030\002 \001(\004\022\014\n\004na",
21030          "me\030\003 \001(\014\022A\n\014snapshotCopy\030\004 \001(\0132+.hadoop." +
21031          "hdfs.fsimage.INodeSection.INodeFile\032\237\001\n\t" +
21032          "DiffEntry\022E\n\004type\030\001 \002(\01627.hadoop.hdfs.fs" +
21033          "image.SnapshotDiffSection.DiffEntry.Type" +
21034          "\022\017\n\007inodeId\030\002 \001(\004\022\021\n\tnumOfDiff\030\003 \001(\r\"\'\n\004" +
21035          "Type\022\014\n\010FILEDIFF\020\001\022\021\n\rDIRECTORYDIFF\020\002\"H\n" +
21036          "\022StringTableSection\022\020\n\010numEntry\030\001 \001(\r\032 \n" +
21037          "\005Entry\022\n\n\002id\030\001 \001(\r\022\013\n\003str\030\002 \001(\t\"\341\002\n\024Secr" +
21038          "etManagerSection\022\021\n\tcurrentId\030\001 \001(\r\022\033\n\023t" +
21039          "okenSequenceNumber\030\002 \001(\r\022\017\n\007numKeys\030\003 \001(",
21040          "\r\022\021\n\tnumTokens\030\004 \001(\r\032<\n\rDelegationKey\022\n\n" +
21041          "\002id\030\001 \001(\r\022\022\n\nexpiryDate\030\002 \001(\004\022\013\n\003key\030\003 \001" +
21042          "(\014\032\266\001\n\014PersistToken\022\017\n\007version\030\001 \001(\r\022\r\n\005" +
21043          "owner\030\002 \001(\t\022\017\n\007renewer\030\003 \001(\t\022\020\n\010realUser" +
21044          "\030\004 \001(\t\022\021\n\tissueDate\030\005 \001(\004\022\017\n\007maxDate\030\006 \001" +
21045          "(\004\022\026\n\016sequenceNumber\030\007 \001(\r\022\023\n\013masterKeyI" +
21046          "d\030\010 \001(\r\022\022\n\nexpiryDate\030\t \001(\004\"W\n\023CacheMana" +
21047          "gerSection\022\027\n\017nextDirectiveId\030\001 \002(\004\022\020\n\010n" +
21048          "umPools\030\002 \002(\r\022\025\n\rnumDirectives\030\003 \002(\rB6\n&" +
21049          "org.apache.hadoop.hdfs.server.namenodeB\014",
21050          "FsImageProto"
21051        };
21052        com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
21053          new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
21054            public com.google.protobuf.ExtensionRegistry assignDescriptors(
21055                com.google.protobuf.Descriptors.FileDescriptor root) {
21056              descriptor = root;
21057              internal_static_hadoop_hdfs_fsimage_FileSummary_descriptor =
21058                getDescriptor().getMessageTypes().get(0);
21059              internal_static_hadoop_hdfs_fsimage_FileSummary_fieldAccessorTable = new
21060                com.google.protobuf.GeneratedMessage.FieldAccessorTable(
21061                  internal_static_hadoop_hdfs_fsimage_FileSummary_descriptor,
21062                  new java.lang.String[] { "OndiskVersion", "LayoutVersion", "Codec", "Sections", });
21063              internal_static_hadoop_hdfs_fsimage_FileSummary_Section_descriptor =
21064                internal_static_hadoop_hdfs_fsimage_FileSummary_descriptor.getNestedTypes().get(0);
21065              internal_static_hadoop_hdfs_fsimage_FileSummary_Section_fieldAccessorTable = new
21066                com.google.protobuf.GeneratedMessage.FieldAccessorTable(
21067                  internal_static_hadoop_hdfs_fsimage_FileSummary_Section_descriptor,
21068                  new java.lang.String[] { "Name", "Length", "Offset", });
21069              internal_static_hadoop_hdfs_fsimage_NameSystemSection_descriptor =
21070                getDescriptor().getMessageTypes().get(1);
21071              internal_static_hadoop_hdfs_fsimage_NameSystemSection_fieldAccessorTable = new
21072                com.google.protobuf.GeneratedMessage.FieldAccessorTable(
21073                  internal_static_hadoop_hdfs_fsimage_NameSystemSection_descriptor,
21074                  new java.lang.String[] { "NamespaceId", "GenstampV1", "GenstampV2", "GenstampV1Limit", "LastAllocatedBlockId", "TransactionId", "RollingUpgradeStartTime", });
21075              internal_static_hadoop_hdfs_fsimage_INodeSection_descriptor =
21076                getDescriptor().getMessageTypes().get(2);
21077              internal_static_hadoop_hdfs_fsimage_INodeSection_fieldAccessorTable = new
21078                com.google.protobuf.GeneratedMessage.FieldAccessorTable(
21079                  internal_static_hadoop_hdfs_fsimage_INodeSection_descriptor,
21080                  new java.lang.String[] { "LastInodeId", "NumInodes", });
21081              internal_static_hadoop_hdfs_fsimage_INodeSection_FileUnderConstructionFeature_descriptor =
21082                internal_static_hadoop_hdfs_fsimage_INodeSection_descriptor.getNestedTypes().get(0);
21083              internal_static_hadoop_hdfs_fsimage_INodeSection_FileUnderConstructionFeature_fieldAccessorTable = new
21084                com.google.protobuf.GeneratedMessage.FieldAccessorTable(
21085                  internal_static_hadoop_hdfs_fsimage_INodeSection_FileUnderConstructionFeature_descriptor,
21086                  new java.lang.String[] { "ClientName", "ClientMachine", });
21087              internal_static_hadoop_hdfs_fsimage_INodeSection_AclFeatureProto_descriptor =
21088                internal_static_hadoop_hdfs_fsimage_INodeSection_descriptor.getNestedTypes().get(1);
21089              internal_static_hadoop_hdfs_fsimage_INodeSection_AclFeatureProto_fieldAccessorTable = new
21090                com.google.protobuf.GeneratedMessage.FieldAccessorTable(
21091                  internal_static_hadoop_hdfs_fsimage_INodeSection_AclFeatureProto_descriptor,
21092                  new java.lang.String[] { "Entries", });
21093              internal_static_hadoop_hdfs_fsimage_INodeSection_INodeFile_descriptor =
21094                internal_static_hadoop_hdfs_fsimage_INodeSection_descriptor.getNestedTypes().get(2);
21095              internal_static_hadoop_hdfs_fsimage_INodeSection_INodeFile_fieldAccessorTable = new
21096                com.google.protobuf.GeneratedMessage.FieldAccessorTable(
21097                  internal_static_hadoop_hdfs_fsimage_INodeSection_INodeFile_descriptor,
21098                  new java.lang.String[] { "Replication", "ModificationTime", "AccessTime", "PreferredBlockSize", "Permission", "Blocks", "FileUC", "Acl", });
21099              internal_static_hadoop_hdfs_fsimage_INodeSection_INodeDirectory_descriptor =
21100                internal_static_hadoop_hdfs_fsimage_INodeSection_descriptor.getNestedTypes().get(3);
21101              internal_static_hadoop_hdfs_fsimage_INodeSection_INodeDirectory_fieldAccessorTable = new
21102                com.google.protobuf.GeneratedMessage.FieldAccessorTable(
21103                  internal_static_hadoop_hdfs_fsimage_INodeSection_INodeDirectory_descriptor,
21104                  new java.lang.String[] { "ModificationTime", "NsQuota", "DsQuota", "Permission", "Acl", });
21105              internal_static_hadoop_hdfs_fsimage_INodeSection_INodeSymlink_descriptor =
21106                internal_static_hadoop_hdfs_fsimage_INodeSection_descriptor.getNestedTypes().get(4);
21107              internal_static_hadoop_hdfs_fsimage_INodeSection_INodeSymlink_fieldAccessorTable = new
21108                com.google.protobuf.GeneratedMessage.FieldAccessorTable(
21109                  internal_static_hadoop_hdfs_fsimage_INodeSection_INodeSymlink_descriptor,
21110                  new java.lang.String[] { "Permission", "Target", "ModificationTime", "AccessTime", });
21111              internal_static_hadoop_hdfs_fsimage_INodeSection_INode_descriptor =
21112                internal_static_hadoop_hdfs_fsimage_INodeSection_descriptor.getNestedTypes().get(5);
21113              internal_static_hadoop_hdfs_fsimage_INodeSection_INode_fieldAccessorTable = new
21114                com.google.protobuf.GeneratedMessage.FieldAccessorTable(
21115                  internal_static_hadoop_hdfs_fsimage_INodeSection_INode_descriptor,
21116                  new java.lang.String[] { "Type", "Id", "Name", "File", "Directory", "Symlink", });
21117              internal_static_hadoop_hdfs_fsimage_FilesUnderConstructionSection_descriptor =
21118                getDescriptor().getMessageTypes().get(3);
21119              internal_static_hadoop_hdfs_fsimage_FilesUnderConstructionSection_fieldAccessorTable = new
21120                com.google.protobuf.GeneratedMessage.FieldAccessorTable(
21121                  internal_static_hadoop_hdfs_fsimage_FilesUnderConstructionSection_descriptor,
21122                  new java.lang.String[] { });
21123              internal_static_hadoop_hdfs_fsimage_FilesUnderConstructionSection_FileUnderConstructionEntry_descriptor =
21124                internal_static_hadoop_hdfs_fsimage_FilesUnderConstructionSection_descriptor.getNestedTypes().get(0);
21125              internal_static_hadoop_hdfs_fsimage_FilesUnderConstructionSection_FileUnderConstructionEntry_fieldAccessorTable = new
21126                com.google.protobuf.GeneratedMessage.FieldAccessorTable(
21127                  internal_static_hadoop_hdfs_fsimage_FilesUnderConstructionSection_FileUnderConstructionEntry_descriptor,
21128                  new java.lang.String[] { "InodeId", "FullPath", });
21129              internal_static_hadoop_hdfs_fsimage_INodeDirectorySection_descriptor =
21130                getDescriptor().getMessageTypes().get(4);
21131              internal_static_hadoop_hdfs_fsimage_INodeDirectorySection_fieldAccessorTable = new
21132                com.google.protobuf.GeneratedMessage.FieldAccessorTable(
21133                  internal_static_hadoop_hdfs_fsimage_INodeDirectorySection_descriptor,
21134                  new java.lang.String[] { });
21135              internal_static_hadoop_hdfs_fsimage_INodeDirectorySection_DirEntry_descriptor =
21136                internal_static_hadoop_hdfs_fsimage_INodeDirectorySection_descriptor.getNestedTypes().get(0);
21137              internal_static_hadoop_hdfs_fsimage_INodeDirectorySection_DirEntry_fieldAccessorTable = new
21138                com.google.protobuf.GeneratedMessage.FieldAccessorTable(
21139                  internal_static_hadoop_hdfs_fsimage_INodeDirectorySection_DirEntry_descriptor,
21140                  new java.lang.String[] { "Parent", "Children", "RefChildren", });
21141              internal_static_hadoop_hdfs_fsimage_INodeReferenceSection_descriptor =
21142                getDescriptor().getMessageTypes().get(5);
21143              internal_static_hadoop_hdfs_fsimage_INodeReferenceSection_fieldAccessorTable = new
21144                com.google.protobuf.GeneratedMessage.FieldAccessorTable(
21145                  internal_static_hadoop_hdfs_fsimage_INodeReferenceSection_descriptor,
21146                  new java.lang.String[] { });
21147              internal_static_hadoop_hdfs_fsimage_INodeReferenceSection_INodeReference_descriptor =
21148                internal_static_hadoop_hdfs_fsimage_INodeReferenceSection_descriptor.getNestedTypes().get(0);
21149              internal_static_hadoop_hdfs_fsimage_INodeReferenceSection_INodeReference_fieldAccessorTable = new
21150                com.google.protobuf.GeneratedMessage.FieldAccessorTable(
21151                  internal_static_hadoop_hdfs_fsimage_INodeReferenceSection_INodeReference_descriptor,
21152                  new java.lang.String[] { "ReferredId", "Name", "DstSnapshotId", "LastSnapshotId", });
21153              internal_static_hadoop_hdfs_fsimage_SnapshotSection_descriptor =
21154                getDescriptor().getMessageTypes().get(6);
21155              internal_static_hadoop_hdfs_fsimage_SnapshotSection_fieldAccessorTable = new
21156                com.google.protobuf.GeneratedMessage.FieldAccessorTable(
21157                  internal_static_hadoop_hdfs_fsimage_SnapshotSection_descriptor,
21158                  new java.lang.String[] { "SnapshotCounter", "SnapshottableDir", "NumSnapshots", });
21159              internal_static_hadoop_hdfs_fsimage_SnapshotSection_Snapshot_descriptor =
21160                internal_static_hadoop_hdfs_fsimage_SnapshotSection_descriptor.getNestedTypes().get(0);
21161              internal_static_hadoop_hdfs_fsimage_SnapshotSection_Snapshot_fieldAccessorTable = new
21162                com.google.protobuf.GeneratedMessage.FieldAccessorTable(
21163                  internal_static_hadoop_hdfs_fsimage_SnapshotSection_Snapshot_descriptor,
21164                  new java.lang.String[] { "SnapshotId", "Root", });
21165              internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_descriptor =
21166                getDescriptor().getMessageTypes().get(7);
21167              internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_fieldAccessorTable = new
21168                com.google.protobuf.GeneratedMessage.FieldAccessorTable(
21169                  internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_descriptor,
21170                  new java.lang.String[] { });
21171              internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_CreatedListEntry_descriptor =
21172                internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_descriptor.getNestedTypes().get(0);
21173              internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_CreatedListEntry_fieldAccessorTable = new
21174                com.google.protobuf.GeneratedMessage.FieldAccessorTable(
21175                  internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_CreatedListEntry_descriptor,
21176                  new java.lang.String[] { "Name", });
21177              internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_DirectoryDiff_descriptor =
21178                internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_descriptor.getNestedTypes().get(1);
21179              internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_DirectoryDiff_fieldAccessorTable = new
21180                com.google.protobuf.GeneratedMessage.FieldAccessorTable(
21181                  internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_DirectoryDiff_descriptor,
21182                  new java.lang.String[] { "SnapshotId", "ChildrenSize", "IsSnapshotRoot", "Name", "SnapshotCopy", "CreatedListSize", "DeletedINode", "DeletedINodeRef", });
21183              internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_FileDiff_descriptor =
21184                internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_descriptor.getNestedTypes().get(2);
21185              internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_FileDiff_fieldAccessorTable = new
21186                com.google.protobuf.GeneratedMessage.FieldAccessorTable(
21187                  internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_FileDiff_descriptor,
21188                  new java.lang.String[] { "SnapshotId", "FileSize", "Name", "SnapshotCopy", });
21189              internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_DiffEntry_descriptor =
21190                internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_descriptor.getNestedTypes().get(3);
21191              internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_DiffEntry_fieldAccessorTable = new
21192                com.google.protobuf.GeneratedMessage.FieldAccessorTable(
21193                  internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_DiffEntry_descriptor,
21194                  new java.lang.String[] { "Type", "InodeId", "NumOfDiff", });
21195              internal_static_hadoop_hdfs_fsimage_StringTableSection_descriptor =
21196                getDescriptor().getMessageTypes().get(8);
21197              internal_static_hadoop_hdfs_fsimage_StringTableSection_fieldAccessorTable = new
21198                com.google.protobuf.GeneratedMessage.FieldAccessorTable(
21199                  internal_static_hadoop_hdfs_fsimage_StringTableSection_descriptor,
21200                  new java.lang.String[] { "NumEntry", });
21201              internal_static_hadoop_hdfs_fsimage_StringTableSection_Entry_descriptor =
21202                internal_static_hadoop_hdfs_fsimage_StringTableSection_descriptor.getNestedTypes().get(0);
21203              internal_static_hadoop_hdfs_fsimage_StringTableSection_Entry_fieldAccessorTable = new
21204                com.google.protobuf.GeneratedMessage.FieldAccessorTable(
21205                  internal_static_hadoop_hdfs_fsimage_StringTableSection_Entry_descriptor,
21206                  new java.lang.String[] { "Id", "Str", });
21207              internal_static_hadoop_hdfs_fsimage_SecretManagerSection_descriptor =
21208                getDescriptor().getMessageTypes().get(9);
21209              internal_static_hadoop_hdfs_fsimage_SecretManagerSection_fieldAccessorTable = new
21210                com.google.protobuf.GeneratedMessage.FieldAccessorTable(
21211                  internal_static_hadoop_hdfs_fsimage_SecretManagerSection_descriptor,
21212                  new java.lang.String[] { "CurrentId", "TokenSequenceNumber", "NumKeys", "NumTokens", });
21213              internal_static_hadoop_hdfs_fsimage_SecretManagerSection_DelegationKey_descriptor =
21214                internal_static_hadoop_hdfs_fsimage_SecretManagerSection_descriptor.getNestedTypes().get(0);
21215              internal_static_hadoop_hdfs_fsimage_SecretManagerSection_DelegationKey_fieldAccessorTable = new
21216                com.google.protobuf.GeneratedMessage.FieldAccessorTable(
21217                  internal_static_hadoop_hdfs_fsimage_SecretManagerSection_DelegationKey_descriptor,
21218                  new java.lang.String[] { "Id", "ExpiryDate", "Key", });
21219              internal_static_hadoop_hdfs_fsimage_SecretManagerSection_PersistToken_descriptor =
21220                internal_static_hadoop_hdfs_fsimage_SecretManagerSection_descriptor.getNestedTypes().get(1);
21221              internal_static_hadoop_hdfs_fsimage_SecretManagerSection_PersistToken_fieldAccessorTable = new
21222                com.google.protobuf.GeneratedMessage.FieldAccessorTable(
21223                  internal_static_hadoop_hdfs_fsimage_SecretManagerSection_PersistToken_descriptor,
21224                  new java.lang.String[] { "Version", "Owner", "Renewer", "RealUser", "IssueDate", "MaxDate", "SequenceNumber", "MasterKeyId", "ExpiryDate", });
21225              internal_static_hadoop_hdfs_fsimage_CacheManagerSection_descriptor =
21226                getDescriptor().getMessageTypes().get(10);
21227              internal_static_hadoop_hdfs_fsimage_CacheManagerSection_fieldAccessorTable = new
21228                com.google.protobuf.GeneratedMessage.FieldAccessorTable(
21229                  internal_static_hadoop_hdfs_fsimage_CacheManagerSection_descriptor,
21230                  new java.lang.String[] { "NextDirectiveId", "NumPools", "NumDirectives", });
21231              return null;
21232            }
21233          };
21234        com.google.protobuf.Descriptors.FileDescriptor
21235          .internalBuildGeneratedFileFrom(descriptorData,
21236            new com.google.protobuf.Descriptors.FileDescriptor[] {
21237              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.getDescriptor(),
21238              org.apache.hadoop.hdfs.protocol.proto.AclProtos.getDescriptor(),
21239            }, assigner);
21240      }
21241    
21242      // @@protoc_insertion_point(outer_class_scope)
21243    }