001    // Generated by the protocol buffer compiler.  DO NOT EDIT!
002    // source: fsimage.proto
003    
004    package org.apache.hadoop.hdfs.server.namenode;
005    
006    public final class FsImageProto {
007      private FsImageProto() {}
  // No-op: fsimage.proto declares no protobuf extensions, so there is
  // nothing to add to the registry. Kept because callers invoke it
  // unconditionally for every generated file.
  public static void registerAllExtensions(
      com.google.protobuf.ExtensionRegistry registry) {
  }
  /**
   * Read-only accessor contract shared by {@code FileSummary} and its
   * {@code Builder}. Generated from the
   * {@code hadoop.hdfs.fsimage.FileSummary} message; hand edits are lost
   * when fsimage.proto is recompiled.
   */
  public interface FileSummaryOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required uint32 ondiskVersion = 1;
    /**
     * <code>required uint32 ondiskVersion = 1;</code>
     *
     * <pre>
     * The version of the above EBNF grammars.
     * </pre>
     */
    boolean hasOndiskVersion();
    /**
     * <code>required uint32 ondiskVersion = 1;</code>
     *
     * <pre>
     * The version of the above EBNF grammars.
     * </pre>
     */
    int getOndiskVersion();

    // required uint32 layoutVersion = 2;
    /**
     * <code>required uint32 layoutVersion = 2;</code>
     *
     * <pre>
     * layoutVersion describes which features are available in the
     * FSImage.
     * </pre>
     */
    boolean hasLayoutVersion();
    /**
     * <code>required uint32 layoutVersion = 2;</code>
     *
     * <pre>
     * layoutVersion describes which features are available in the
     * FSImage.
     * </pre>
     */
    int getLayoutVersion();

    // optional string codec = 3;
    /**
     * <code>optional string codec = 3;</code>
     */
    boolean hasCodec();
    /**
     * <code>optional string codec = 3;</code>
     */
    java.lang.String getCodec();
    /**
     * <code>optional string codec = 3;</code>
     *
     * Raw UTF-8 bytes of the codec string; avoids a decode when the
     * caller only needs bytes.
     */
    com.google.protobuf.ByteString
        getCodecBytes();

    // repeated .hadoop.hdfs.fsimage.FileSummary.Section sections = 4;
    /**
     * <code>repeated .hadoop.hdfs.fsimage.FileSummary.Section sections = 4;</code>
     */
    java.util.List<org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section> 
        getSectionsList();
    /**
     * <code>repeated .hadoop.hdfs.fsimage.FileSummary.Section sections = 4;</code>
     */
    org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section getSections(int index);
    /**
     * <code>repeated .hadoop.hdfs.fsimage.FileSummary.Section sections = 4;</code>
     */
    int getSectionsCount();
    /**
     * <code>repeated .hadoop.hdfs.fsimage.FileSummary.Section sections = 4;</code>
     */
    java.util.List<? extends org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.SectionOrBuilder> 
        getSectionsOrBuilderList();
    /**
     * <code>repeated .hadoop.hdfs.fsimage.FileSummary.Section sections = 4;</code>
     */
    org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.SectionOrBuilder getSectionsOrBuilder(
        int index);
  }
092      /**
093       * Protobuf type {@code hadoop.hdfs.fsimage.FileSummary}
094       */
095      public static final class FileSummary extends
096          com.google.protobuf.GeneratedMessage
097          implements FileSummaryOrBuilder {
    // Use FileSummary.newBuilder() to construct.
    private FileSummary(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Used only to create the singleton default instance; field values come
    // from initFields() defaults and there are no unknown fields.
    private FileSummary(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    // Singleton default instance, assigned in the class static initializer
    // (outside this view, per the usual protoc layout).
    private static final FileSummary defaultInstance;
    public static FileSummary getDefaultInstance() {
      return defaultInstance;
    }

    public FileSummary getDefaultInstanceForType() {
      return defaultInstance;
    }

    // Fields seen on the wire whose tags this schema version does not know;
    // preserved so reserialization round-trips them.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    /**
     * Wire-format parsing constructor: reads tag/value pairs until EOF
     * (tag 0) or the end of a length-delimited scope. Invoked only via
     * {@code PARSER.parsePartialFrom}.
     */
    private FileSummary(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          // Tag = (field_number << 3) | wire_type. The default label is
          // emitted before the later cases by protoc; Java switch dispatch
          // does not depend on case order, so this is equivalent.
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              // Unrecognized tag: stash it in unknownFields; a false return
              // means end-of-group, which also terminates parsing.
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 8: {
              // Field 1, varint: ondiskVersion.
              bitField0_ |= 0x00000001;
              ondiskVersion_ = input.readUInt32();
              break;
            }
            case 16: {
              // Field 2, varint: layoutVersion.
              bitField0_ |= 0x00000002;
              layoutVersion_ = input.readUInt32();
              break;
            }
            case 26: {
              // Field 3, length-delimited: codec, kept as lazily-decoded bytes.
              bitField0_ |= 0x00000004;
              codec_ = input.readBytes();
              break;
            }
            case 34: {
              // Field 4, length-delimited: repeated Section. The list is
              // allocated lazily on first occurrence (tracked via
              // mutable_bitField0_, not bitField0_, since repeated fields
              // have no has-bit).
              if (!((mutable_bitField0_ & 0x00000008) == 0x00000008)) {
                sections_ = new java.util.ArrayList<org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section>();
                mutable_bitField0_ |= 0x00000008;
              }
              sections_.add(input.readMessage(org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section.PARSER, extensionRegistry));
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Runs even on failure so the partially-parsed message attached via
        // setUnfinishedMessage is immutable and safe to expose.
        if (((mutable_bitField0_ & 0x00000008) == 0x00000008)) {
          sections_ = java.util.Collections.unmodifiableList(sections_);
        }
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    // Reflection descriptor for this message type; the referenced static is
    // initialized in the outer class (outside this view).
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_FileSummary_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_FileSummary_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Builder.class);
    }

    // NOTE(review): non-final public static PARSER is a protoc 2.5 artifact
    // (later generator versions make it final); left as generated.
    public static com.google.protobuf.Parser<FileSummary> PARSER =
        new com.google.protobuf.AbstractParser<FileSummary>() {
      public FileSummary parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new FileSummary(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<FileSummary> getParserForType() {
      return PARSER;
    }
207    
    /**
     * Read-only accessor contract shared by {@code Section} and its
     * {@code Builder}. A Section record names one region of the fsimage
     * file (name, byte length, byte offset).
     */
    public interface SectionOrBuilder
        extends com.google.protobuf.MessageOrBuilder {

      // optional string name = 1;
      /**
       * <code>optional string name = 1;</code>
       */
      boolean hasName();
      /**
       * <code>optional string name = 1;</code>
       */
      java.lang.String getName();
      /**
       * <code>optional string name = 1;</code>
       */
      com.google.protobuf.ByteString
          getNameBytes();

      // optional uint64 length = 2;
      /**
       * <code>optional uint64 length = 2;</code>
       */
      boolean hasLength();
      /**
       * <code>optional uint64 length = 2;</code>
       */
      long getLength();

      // optional uint64 offset = 3;
      /**
       * <code>optional uint64 offset = 3;</code>
       */
      boolean hasOffset();
      /**
       * <code>optional uint64 offset = 3;</code>
       */
      long getOffset();
    }
246        /**
247         * Protobuf type {@code hadoop.hdfs.fsimage.FileSummary.Section}
248         *
249         * <pre>
250         * index for each section
251         * </pre>
252         */
253        public static final class Section extends
254            com.google.protobuf.GeneratedMessage
255            implements SectionOrBuilder {
      // Use Section.newBuilder() to construct.
      private Section(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
        super(builder);
        this.unknownFields = builder.getUnknownFields();
      }
      // Used only to create the singleton default instance.
      private Section(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

      // Singleton default instance, assigned in a static initializer outside
      // this view.
      private static final Section defaultInstance;
      public static Section getDefaultInstance() {
        return defaultInstance;
      }

      public Section getDefaultInstanceForType() {
        return defaultInstance;
      }

      // Unrecognized wire fields, preserved for reserialization.
      private final com.google.protobuf.UnknownFieldSet unknownFields;
      @java.lang.Override
      public final com.google.protobuf.UnknownFieldSet
          getUnknownFields() {
        return this.unknownFields;
      }
      /**
       * Wire-format parsing constructor; invoked only via
       * {@code PARSER.parsePartialFrom}. Tag = (field_number << 3) | wire_type.
       */
      private Section(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        initFields();
        int mutable_bitField0_ = 0;
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
            com.google.protobuf.UnknownFieldSet.newBuilder();
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                // End of stream / enclosing length-delimited scope.
                done = true;
                break;
              default: {
                if (!parseUnknownField(input, unknownFields,
                                       extensionRegistry, tag)) {
                  done = true;
                }
                break;
              }
              case 10: {
                // Field 1, length-delimited: name (kept as undecoded bytes).
                bitField0_ |= 0x00000001;
                name_ = input.readBytes();
                break;
              }
              case 16: {
                // Field 2, varint: length.
                bitField0_ |= 0x00000002;
                length_ = input.readUInt64();
                break;
              }
              case 24: {
                // Field 3, varint: offset.
                bitField0_ |= 0x00000004;
                offset_ = input.readUInt64();
                break;
              }
            }
          }
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(this);
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(
              e.getMessage()).setUnfinishedMessage(this);
        } finally {
          // Seal state even on failure so the partial message attached to the
          // exception is safe to inspect.
          this.unknownFields = unknownFields.build();
          makeExtensionsImmutable();
        }
      }
      // Reflection descriptor for Section; the referenced static lives in the
      // outer class (outside this view).
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_FileSummary_Section_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_FileSummary_Section_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section.Builder.class);
      }

      // NOTE(review): non-final public static PARSER is a protoc 2.5 artifact;
      // left as generated.
      public static com.google.protobuf.Parser<Section> PARSER =
          new com.google.protobuf.AbstractParser<Section>() {
        public Section parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          return new Section(input, extensionRegistry);
        }
      };

      @java.lang.Override
      public com.google.protobuf.Parser<Section> getParserForType() {
        return PARSER;
      }
354    
      // Has-bits for the three optional fields: 0x1=name, 0x2=length, 0x4=offset.
      private int bitField0_;
      // optional string name = 1;
      public static final int NAME_FIELD_NUMBER = 1;
      // Either a String (decoded) or a ByteString (raw from the wire);
      // decoding is deferred until getName() is first called.
      private java.lang.Object name_;
      /**
       * <code>optional string name = 1;</code>
       */
      public boolean hasName() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>optional string name = 1;</code>
       */
      public java.lang.String getName() {
        java.lang.Object ref = name_;
        if (ref instanceof java.lang.String) {
          return (java.lang.String) ref;
        } else {
          com.google.protobuf.ByteString bs = 
              (com.google.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          // Cache the decoded String only when the bytes were valid UTF-8, so
          // getNameBytes() can still return the original bytes otherwise.
          if (bs.isValidUtf8()) {
            name_ = s;
          }
          return s;
        }
      }
      /**
       * <code>optional string name = 1;</code>
       */
      public com.google.protobuf.ByteString
          getNameBytes() {
        java.lang.Object ref = name_;
        if (ref instanceof java.lang.String) {
          com.google.protobuf.ByteString b = 
              com.google.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          name_ = b;
          return b;
        } else {
          return (com.google.protobuf.ByteString) ref;
        }
      }
398    
      // optional uint64 length = 2;
      public static final int LENGTH_FIELD_NUMBER = 2;
      // uint64 on the wire; Java long may appear negative for values >= 2^63.
      private long length_;
      /**
       * <code>optional uint64 length = 2;</code>
       */
      public boolean hasLength() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>optional uint64 length = 2;</code>
       */
      public long getLength() {
        return length_;
      }

      // optional uint64 offset = 3;
      public static final int OFFSET_FIELD_NUMBER = 3;
      private long offset_;
      /**
       * <code>optional uint64 offset = 3;</code>
       */
      public boolean hasOffset() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      /**
       * <code>optional uint64 offset = 3;</code>
       */
      public long getOffset() {
        return offset_;
      }
430    
      // Resets all fields to their proto defaults; called before parsing.
      private void initFields() {
        name_ = "";
        length_ = 0L;
        offset_ = 0L;
      }
      // -1 = not computed, 0 = false, 1 = true. All Section fields are
      // optional, so the message is always initialized.
      private byte memoizedIsInitialized = -1;
      public final boolean isInitialized() {
        byte isInitialized = memoizedIsInitialized;
        if (isInitialized != -1) return isInitialized == 1;

        memoizedIsInitialized = 1;
        return true;
      }

      public void writeTo(com.google.protobuf.CodedOutputStream output)
                          throws java.io.IOException {
        // Populates memoizedSerializedSize, which CodedOutputStream relies on
        // for nested-message length prefixes.
        getSerializedSize();
        if (((bitField0_ & 0x00000001) == 0x00000001)) {
          output.writeBytes(1, getNameBytes());
        }
        if (((bitField0_ & 0x00000002) == 0x00000002)) {
          output.writeUInt64(2, length_);
        }
        if (((bitField0_ & 0x00000004) == 0x00000004)) {
          output.writeUInt64(3, offset_);
        }
        getUnknownFields().writeTo(output);
      }

      private int memoizedSerializedSize = -1;
      public int getSerializedSize() {
        int size = memoizedSerializedSize;
        if (size != -1) return size;

        // Only fields whose has-bit is set contribute bytes on the wire.
        size = 0;
        if (((bitField0_ & 0x00000001) == 0x00000001)) {
          size += com.google.protobuf.CodedOutputStream
            .computeBytesSize(1, getNameBytes());
        }
        if (((bitField0_ & 0x00000002) == 0x00000002)) {
          size += com.google.protobuf.CodedOutputStream
            .computeUInt64Size(2, length_);
        }
        if (((bitField0_ & 0x00000004) == 0x00000004)) {
          size += com.google.protobuf.CodedOutputStream
            .computeUInt64Size(3, offset_);
        }
        size += getUnknownFields().getSerializedSize();
        memoizedSerializedSize = size;
        return size;
      }

      private static final long serialVersionUID = 0L;
      // Java serialization is routed through GeneratedMessage's writeReplace
      // (which serializes via the protobuf wire format).
      @java.lang.Override
      protected java.lang.Object writeReplace()
          throws java.io.ObjectStreamException {
        return super.writeReplace();
      }
489    
      // Standard generated parse entry points; all delegate to PARSER. The
      // *DelimitedFrom variants read a varint length prefix first.
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section parseFrom(
          com.google.protobuf.ByteString data)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section parseFrom(
          com.google.protobuf.ByteString data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section parseFrom(byte[] data)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section parseFrom(
          byte[] data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section parseFrom(java.io.InputStream input)
          throws java.io.IOException {
        return PARSER.parseFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section parseFrom(
          java.io.InputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseFrom(input, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section parseDelimitedFrom(java.io.InputStream input)
          throws java.io.IOException {
        return PARSER.parseDelimitedFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section parseDelimitedFrom(
          java.io.InputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseDelimitedFrom(input, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section parseFrom(
          com.google.protobuf.CodedInputStream input)
          throws java.io.IOException {
        return PARSER.parseFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section parseFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseFrom(input, extensionRegistry);
      }
542    
      // Builder factory methods. newBuilder(prototype) starts from a copy of
      // an existing Section; toBuilder() is the instance-method equivalent.
      public static Builder newBuilder() { return Builder.create(); }
      public Builder newBuilderForType() { return newBuilder(); }
      public static Builder newBuilder(org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section prototype) {
        return newBuilder().mergeFrom(prototype);
      }
      public Builder toBuilder() { return newBuilder(this); }

      // Called by GeneratedMessage internals to create a child builder that
      // reports changes back to its parent.
      @java.lang.Override
      protected Builder newBuilderForType(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        Builder builder = new Builder(parent);
        return builder;
      }
556          /**
557           * Protobuf type {@code hadoop.hdfs.fsimage.FileSummary.Section}
558           *
559           * <pre>
560           * index for each section
561           * </pre>
562           */
563          public static final class Builder extends
564              com.google.protobuf.GeneratedMessage.Builder<Builder>
565             implements org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.SectionOrBuilder {
        // Same descriptor and accessor table as the Section message itself.
        public static final com.google.protobuf.Descriptors.Descriptor
            getDescriptor() {
          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_FileSummary_Section_descriptor;
        }

        protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
            internalGetFieldAccessorTable() {
          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_FileSummary_Section_fieldAccessorTable
              .ensureFieldAccessorsInitialized(
                  org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section.Builder.class);
        }
577    
        // Construct using org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section.newBuilder()
        private Builder() {
          maybeForceBuilderInitialization();
        }

        // Parent-aware constructor used by nested-builder plumbing.
        private Builder(
            com.google.protobuf.GeneratedMessage.BuilderParent parent) {
          super(parent);
          maybeForceBuilderInitialization();
        }
        // Section has no sub-message fields, so there are no field builders
        // to force-initialize; the body is intentionally empty.
        private void maybeForceBuilderInitialization() {
          if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          }
        }
        private static Builder create() {
          return new Builder();
        }
595    
        // Resets every field to its proto default and clears its has-bit.
        public Builder clear() {
          super.clear();
          name_ = "";
          bitField0_ = (bitField0_ & ~0x00000001);
          length_ = 0L;
          bitField0_ = (bitField0_ & ~0x00000002);
          offset_ = 0L;
          bitField0_ = (bitField0_ & ~0x00000004);
          return this;
        }

        // Deep copy via an intermediate partial message.
        public Builder clone() {
          return create().mergeFrom(buildPartial());
        }
610    
        public com.google.protobuf.Descriptors.Descriptor
            getDescriptorForType() {
          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_FileSummary_Section_descriptor;
        }

        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section getDefaultInstanceForType() {
          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section.getDefaultInstance();
        }

        // build() enforces required-field initialization; Section has only
        // optional fields, so isInitialized() is always true and the throw
        // branch is effectively unreachable here.
        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section build() {
          org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section result = buildPartial();
          if (!result.isInitialized()) {
            throw newUninitializedMessageException(result);
          }
          return result;
        }
627    
        /**
         * Copies the builder's fields into a new immutable Section without
         * checking initialization. Field values are copied unconditionally;
         * only the has-bits are translated from the builder's bitField0_ into
         * the message's.
         */
        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section buildPartial() {
          org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section result = new org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section(this);
          int from_bitField0_ = bitField0_;
          int to_bitField0_ = 0;
          if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
            to_bitField0_ |= 0x00000001;
          }
          result.name_ = name_;
          if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
            to_bitField0_ |= 0x00000002;
          }
          result.length_ = length_;
          if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
            to_bitField0_ |= 0x00000004;
          }
          result.offset_ = offset_;
          result.bitField0_ = to_bitField0_;
          onBuilt();
          return result;
        }
648    
        // Generic merge: dispatch to the typed overload when possible,
        // otherwise fall back to reflection-based merging.
        public Builder mergeFrom(com.google.protobuf.Message other) {
          if (other instanceof org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section) {
            return mergeFrom((org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section)other);
          } else {
            super.mergeFrom(other);
            return this;
          }
        }

        // Typed merge: each field set in `other` overwrites this builder's
        // value. name_ is copied by reference (String or ByteString) to avoid
        // forcing a UTF-8 decode.
        public Builder mergeFrom(org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section other) {
          if (other == org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section.getDefaultInstance()) return this;
          if (other.hasName()) {
            bitField0_ |= 0x00000001;
            name_ = other.name_;
            onChanged();
          }
          if (other.hasLength()) {
            setLength(other.getLength());
          }
          if (other.hasOffset()) {
            setOffset(other.getOffset());
          }
          this.mergeUnknownFields(other.getUnknownFields());
          return this;
        }

        // All fields optional, so a Section builder is always initialized.
        public final boolean isInitialized() {
          return true;
        }
678    
        /**
         * Parses from a stream and merges the result into this builder.
         * On parse failure, any fields decoded before the error are still
         * merged in (via the finally block) before the exception propagates.
         */
        public Builder mergeFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws java.io.IOException {
          org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section parsedMessage = null;
          try {
            parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            parsedMessage = (org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section) e.getUnfinishedMessage();
            throw e;
          } finally {
            if (parsedMessage != null) {
              mergeFrom(parsedMessage);
            }
          }
          return this;
        }
        // Builder-side has-bits: 0x1=name, 0x2=length, 0x4=offset.
        private int bitField0_;

        // optional string name = 1;
        // Either a String or a ByteString, same lazy-decode scheme as the
        // message class.
        private java.lang.Object name_ = "";
        /**
         * <code>optional string name = 1;</code>
         */
        public boolean hasName() {
          return ((bitField0_ & 0x00000001) == 0x00000001);
        }
        /**
         * <code>optional string name = 1;</code>
         */
        public java.lang.String getName() {
          java.lang.Object ref = name_;
          if (!(ref instanceof java.lang.String)) {
            // Decode once and cache; the builder variant caches
            // unconditionally (no isValidUtf8 check, unlike the message).
            java.lang.String s = ((com.google.protobuf.ByteString) ref)
                .toStringUtf8();
            name_ = s;
            return s;
          } else {
            return (java.lang.String) ref;
          }
        }
        /**
         * <code>optional string name = 1;</code>
         */
        public com.google.protobuf.ByteString
            getNameBytes() {
          java.lang.Object ref = name_;
          if (ref instanceof String) {
            com.google.protobuf.ByteString b = 
                com.google.protobuf.ByteString.copyFromUtf8(
                    (java.lang.String) ref);
            name_ = b;
            return b;
          } else {
            return (com.google.protobuf.ByteString) ref;
          }
        }
        /**
         * <code>optional string name = 1;</code>
         */
        public Builder setName(
            java.lang.String value) {
          if (value == null) {
            throw new NullPointerException();
          }
          bitField0_ |= 0x00000001;
          name_ = value;
          onChanged();
          return this;
        }
        /**
         * <code>optional string name = 1;</code>
         *
         * Resets name to the proto default ("") and clears its has-bit.
         */
        public Builder clearName() {
          bitField0_ = (bitField0_ & ~0x00000001);
          name_ = getDefaultInstance().getName();
          onChanged();
          return this;
        }
        /**
         * <code>optional string name = 1;</code>
         *
         * Sets name from raw bytes without UTF-8 validation or decoding.
         */
        public Builder setNameBytes(
            com.google.protobuf.ByteString value) {
          if (value == null) {
            throw new NullPointerException();
          }
          bitField0_ |= 0x00000001;
          name_ = value;
          onChanged();
          return this;
        }
771    
        // optional uint64 length = 2;
        // uint64 on the wire; represented as a Java long (may be negative if
        // the unsigned value exceeds Long.MAX_VALUE).
        private long length_ ;
        /**
         * <code>optional uint64 length = 2;</code>
         */
        public boolean hasLength() {
          return ((bitField0_ & 0x00000002) == 0x00000002);
        }
        /**
         * <code>optional uint64 length = 2;</code>
         */
        public long getLength() {
          return length_;
        }
        /**
         * <code>optional uint64 length = 2;</code>
         */
        public Builder setLength(long value) {
          bitField0_ |= 0x00000002;
          length_ = value;
          onChanged();
          return this;
        }
        /**
         * <code>optional uint64 length = 2;</code>
         */
        public Builder clearLength() {
          bitField0_ = (bitField0_ & ~0x00000002);
          length_ = 0L;
          onChanged();
          return this;
        }
804    
        // optional uint64 offset = 3;
        // uint64 on the wire; represented as a Java long (may be negative if
        // the unsigned value exceeds Long.MAX_VALUE).
        private long offset_ ;
        /**
         * <code>optional uint64 offset = 3;</code>
         */
        public boolean hasOffset() {
          return ((bitField0_ & 0x00000004) == 0x00000004);
        }
        /**
         * <code>optional uint64 offset = 3;</code>
         */
        public long getOffset() {
          return offset_;
        }
        /**
         * <code>optional uint64 offset = 3;</code>
         */
        public Builder setOffset(long value) {
          bitField0_ |= 0x00000004;
          offset_ = value;
          onChanged();
          return this;
        }
        /**
         * <code>optional uint64 offset = 3;</code>
         */
        public Builder clearOffset() {
          bitField0_ = (bitField0_ & ~0x00000004);
          offset_ = 0L;
          onChanged();
          return this;
        }
837    
838            // @@protoc_insertion_point(builder_scope:hadoop.hdfs.fsimage.FileSummary.Section)
839          }
840    
      // Eagerly create the shared singleton default instance for Section and
      // initialize its fields to their proto defaults.
      static {
        defaultInstance = new Section(true);
        defaultInstance.initFields();
      }
845    
846          // @@protoc_insertion_point(class_scope:hadoop.hdfs.fsimage.FileSummary.Section)
847        }
848    
    // Tracks which fields are present on this message instance:
    // bit 0 = ondiskVersion, bit 1 = layoutVersion, bit 2 = codec.
    private int bitField0_;
    // required uint32 ondiskVersion = 1;
    public static final int ONDISKVERSION_FIELD_NUMBER = 1;
    private int ondiskVersion_;
    /**
     * <code>required uint32 ondiskVersion = 1;</code>
     *
     * <pre>
     * The version of the above EBNF grammars.
     * </pre>
     */
    public boolean hasOndiskVersion() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required uint32 ondiskVersion = 1;</code>
     *
     * <pre>
     * The version of the above EBNF grammars.
     * </pre>
     */
    public int getOndiskVersion() {
      return ondiskVersion_;
    }
873    
    // required uint32 layoutVersion = 2;
    public static final int LAYOUTVERSION_FIELD_NUMBER = 2;
    private int layoutVersion_;
    /**
     * <code>required uint32 layoutVersion = 2;</code>
     *
     * <pre>
     * layoutVersion describes which features are available in the
     * FSImage.
     * </pre>
     */
    public boolean hasLayoutVersion() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>required uint32 layoutVersion = 2;</code>
     *
     * <pre>
     * layoutVersion describes which features are available in the
     * FSImage.
     * </pre>
     */
    public int getLayoutVersion() {
      return layoutVersion_;
    }
899    
    // optional string codec = 3;
    public static final int CODEC_FIELD_NUMBER = 3;
    // Holds either a java.lang.String or a ByteString; converted lazily.
    private java.lang.Object codec_;
    /**
     * <code>optional string codec = 3;</code>
     */
    public boolean hasCodec() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    /**
     * <code>optional string codec = 3;</code>
     */
    public java.lang.String getCodec() {
      java.lang.Object ref = codec_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        com.google.protobuf.ByteString bs = 
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        // Unlike the Builder variant, the immutable message only caches the
        // decoded String when the bytes are valid UTF-8, so a lossy decode of
        // malformed bytes is never cached as the canonical value.
        if (bs.isValidUtf8()) {
          codec_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string codec = 3;</code>
     */
    public com.google.protobuf.ByteString
        getCodecBytes() {
      java.lang.Object ref = codec_;
      if (ref instanceof java.lang.String) {
        // Stored as a String: encode to UTF-8 bytes and cache the result.
        com.google.protobuf.ByteString b = 
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        codec_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
942    
    // repeated .hadoop.hdfs.fsimage.FileSummary.Section sections = 4;
    public static final int SECTIONS_FIELD_NUMBER = 4;
    // Immutable once the message is built; exposed directly by the accessors
    // below without defensive copying.
    private java.util.List<org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section> sections_;
    /**
     * <code>repeated .hadoop.hdfs.fsimage.FileSummary.Section sections = 4;</code>
     */
    public java.util.List<org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section> getSectionsList() {
      return sections_;
    }
    /**
     * <code>repeated .hadoop.hdfs.fsimage.FileSummary.Section sections = 4;</code>
     */
    public java.util.List<? extends org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.SectionOrBuilder> 
        getSectionsOrBuilderList() {
      return sections_;
    }
    /**
     * <code>repeated .hadoop.hdfs.fsimage.FileSummary.Section sections = 4;</code>
     */
    public int getSectionsCount() {
      return sections_.size();
    }
    /**
     * <code>repeated .hadoop.hdfs.fsimage.FileSummary.Section sections = 4;</code>
     */
    public org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section getSections(int index) {
      return sections_.get(index);
    }
    /**
     * <code>repeated .hadoop.hdfs.fsimage.FileSummary.Section sections = 4;</code>
     */
    public org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.SectionOrBuilder getSectionsOrBuilder(
        int index) {
      return sections_.get(index);
    }
978    
    // Resets all fields to their proto-declared defaults.
    private void initFields() {
      ondiskVersion_ = 0;
      layoutVersion_ = 0;
      codec_ = "";
      sections_ = java.util.Collections.emptyList();
    }
    // Memoized initialization state: -1 = not yet computed, 0 = false, 1 = true.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      // Only the two required scalar fields are checked; Section has no
      // required fields, so the repeated sections need no recursive check.
      if (!hasOndiskVersion()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasLayoutVersion()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }
1001    
    // Serializes this message to the given stream in field-number order.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      // Called for its side effect: memoizes the serialized sizes of this
      // message and its nested messages before any length-delimited writes.
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeUInt32(1, ondiskVersion_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeUInt32(2, layoutVersion_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeBytes(3, getCodecBytes());
      }
      for (int i = 0; i < sections_.size(); i++) {
        output.writeMessage(4, sections_.get(i));
      }
      // Preserve any fields that were unknown at parse time.
      getUnknownFields().writeTo(output);
    }
1019    
    // Cached serialized size; -1 means not yet computed. Safe to memoize
    // because the built message is immutable.
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt32Size(1, ondiskVersion_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt32Size(2, layoutVersion_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(3, getCodecBytes());
      }
      for (int i = 0; i < sections_.size(); i++) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(4, sections_.get(i));
      }
      // Unknown fields retained from parsing contribute to the size too.
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
1046    
    private static final long serialVersionUID = 0L;
    // Java serialization hook: delegates to GeneratedMessage, which replaces
    // the instance with a serializable proxy.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
1053    
    // Static parse entry points; all delegate to the singleton PARSER.
    // The InvalidProtocolBufferException variants validate that required
    // fields (ondiskVersion, layoutVersion) are present.
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    // Delimited variants read a leading varint length before the message,
    // allowing multiple messages on one stream.
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
1106    
    // Builder factory methods.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    // Returns a builder pre-populated with the given prototype's fields.
    public static Builder newBuilder(org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    // Framework hook: creates a builder attached to a parent so nested-builder
    // change notifications propagate upward.
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
1120        /**
1121         * Protobuf type {@code hadoop.hdfs.fsimage.FileSummary}
1122         */
1123        public static final class Builder extends
1124            com.google.protobuf.GeneratedMessage.Builder<Builder>
1125           implements org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummaryOrBuilder {
      // Reflection support: descriptor and field-accessor table for this
      // message type, defined on the enclosing FsImageProto outer class.
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_FileSummary_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_FileSummary_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Builder.class);
      }
1137    
      // Construct using org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      // Used when this builder is nested inside another builder, so changes
      // are reported to the parent.
      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        // When the runtime mandates field builders (non-lite descriptors),
        // eagerly create the repeated-field builder for sections.
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getSectionsFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }
1156    
      // Resets every field to its default and clears all presence bits.
      public Builder clear() {
        super.clear();
        ondiskVersion_ = 0;
        bitField0_ = (bitField0_ & ~0x00000001);
        layoutVersion_ = 0;
        bitField0_ = (bitField0_ & ~0x00000002);
        codec_ = "";
        bitField0_ = (bitField0_ & ~0x00000004);
        if (sectionsBuilder_ == null) {
          // No field builder: drop the list and the "list is mutable" bit.
          sections_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000008);
        } else {
          sectionsBuilder_.clear();
        }
        return this;
      }

      // Deep copy via a partially built snapshot of the current state.
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
1177    
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_FileSummary_descriptor;
      }

      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary getDefaultInstanceForType() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.getDefaultInstance();
      }

      // Builds the message and enforces that all required fields
      // (ondiskVersion, layoutVersion) are set, throwing otherwise.
      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary build() {
        org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
1194    
      // Builds the message without checking required fields: copies scalar
      // values, translates builder presence bits into message presence bits,
      // and transfers ownership of the sections list.
      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary buildPartial() {
        org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary result = new org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.ondiskVersion_ = ondiskVersion_;
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.layoutVersion_ = layoutVersion_;
        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
          to_bitField0_ |= 0x00000004;
        }
        result.codec_ = codec_;
        if (sectionsBuilder_ == null) {
          if (((bitField0_ & 0x00000008) == 0x00000008)) {
            // Freeze the list and clear the mutable bit so this builder will
            // copy-on-write rather than mutate the list now owned by result.
            sections_ = java.util.Collections.unmodifiableList(sections_);
            bitField0_ = (bitField0_ & ~0x00000008);
          }
          result.sections_ = sections_;
        } else {
          result.sections_ = sectionsBuilder_.build();
        }
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
1224    
      // Dispatches to the typed merge when possible; otherwise falls back to
      // the reflective descriptor-based merge in the superclass.
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary) {
          return mergeFrom((org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
1233    
      // Merges all fields present in 'other' into this builder: scalars are
      // overwritten, repeated sections are appended.
      public Builder mergeFrom(org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary other) {
        if (other == org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.getDefaultInstance()) return this;
        if (other.hasOndiskVersion()) {
          setOndiskVersion(other.getOndiskVersion());
        }
        if (other.hasLayoutVersion()) {
          setLayoutVersion(other.getLayoutVersion());
        }
        if (other.hasCodec()) {
          // Copy the raw field object (String or ByteString) directly,
          // preserving the lazy String/bytes representation.
          bitField0_ |= 0x00000004;
          codec_ = other.codec_;
          onChanged();
        }
        if (sectionsBuilder_ == null) {
          if (!other.sections_.isEmpty()) {
            if (sections_.isEmpty()) {
              // Alias other's immutable list; the cleared mutable bit forces
              // a copy before any local mutation.
              sections_ = other.sections_;
              bitField0_ = (bitField0_ & ~0x00000008);
            } else {
              ensureSectionsIsMutable();
              sections_.addAll(other.sections_);
            }
            onChanged();
          }
        } else {
          if (!other.sections_.isEmpty()) {
            if (sectionsBuilder_.isEmpty()) {
              // Discard the empty field builder and alias other's list;
              // recreate the builder only if the runtime requires one.
              sectionsBuilder_.dispose();
              sectionsBuilder_ = null;
              sections_ = other.sections_;
              bitField0_ = (bitField0_ & ~0x00000008);
              sectionsBuilder_ = 
                com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
                   getSectionsFieldBuilder() : null;
            } else {
              sectionsBuilder_.addAllMessages(other.sections_);
            }
          }
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
1276    
      // True only when both required fields have been set; unlike the message
      // variant this is recomputed on every call (builders are mutable).
      public final boolean isInitialized() {
        if (!hasOndiskVersion()) {
          
          return false;
        }
        if (!hasLayoutVersion()) {
          
          return false;
        }
        return true;
      }
1288    
      // Parses a FileSummary from the stream and merges it into this builder.
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          // Recover whatever was parsed before the failure so it can still be
          // merged in the finally block, then rethrow.
          parsedMessage = (org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      // Tracks which fields have been explicitly set on this builder:
      // bit 0 = ondiskVersion, bit 1 = layoutVersion, bit 2 = codec,
      // bit 3 = sections list is privately owned and mutable.
      private int bitField0_;

      // required uint32 ondiskVersion = 1;
      private int ondiskVersion_ ;
      /**
       * <code>required uint32 ondiskVersion = 1;</code>
       *
       * <pre>
       * The version of the above EBNF grammars.
       * </pre>
       */
      public boolean hasOndiskVersion() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>required uint32 ondiskVersion = 1;</code>
       *
       * <pre>
       * The version of the above EBNF grammars.
       * </pre>
       */
      public int getOndiskVersion() {
        return ondiskVersion_;
      }
      /**
       * <code>required uint32 ondiskVersion = 1;</code>
       *
       * <pre>
       * The version of the above EBNF grammars.
       * </pre>
       */
      public Builder setOndiskVersion(int value) {
        bitField0_ |= 0x00000001;
        ondiskVersion_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>required uint32 ondiskVersion = 1;</code>
       *
       * <pre>
       * The version of the above EBNF grammars.
       * </pre>
       */
      public Builder clearOndiskVersion() {
        bitField0_ = (bitField0_ & ~0x00000001);
        ondiskVersion_ = 0;
        onChanged();
        return this;
      }
1356    
      // required uint32 layoutVersion = 2;
      private int layoutVersion_ ;
      /**
       * <code>required uint32 layoutVersion = 2;</code>
       *
       * <pre>
       * layoutVersion describes which features are available in the
       * FSImage.
       * </pre>
       */
      public boolean hasLayoutVersion() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>required uint32 layoutVersion = 2;</code>
       *
       * <pre>
       * layoutVersion describes which features are available in the
       * FSImage.
       * </pre>
       */
      public int getLayoutVersion() {
        return layoutVersion_;
      }
      /**
       * <code>required uint32 layoutVersion = 2;</code>
       *
       * <pre>
       * layoutVersion describes which features are available in the
       * FSImage.
       * </pre>
       */
      public Builder setLayoutVersion(int value) {
        bitField0_ |= 0x00000002;
        layoutVersion_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>required uint32 layoutVersion = 2;</code>
       *
       * <pre>
       * layoutVersion describes which features are available in the
       * FSImage.
       * </pre>
       */
      public Builder clearLayoutVersion() {
        bitField0_ = (bitField0_ & ~0x00000002);
        layoutVersion_ = 0;
        onChanged();
        return this;
      }
1409    
      // optional string codec = 3;
      // Holds either a java.lang.String or a ByteString; converted lazily and
      // cached in place.
      private java.lang.Object codec_ = "";
      /**
       * <code>optional string codec = 3;</code>
       */
      public boolean hasCodec() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      /**
       * <code>optional string codec = 3;</code>
       */
      public java.lang.String getCodec() {
        java.lang.Object ref = codec_;
        if (!(ref instanceof java.lang.String)) {
          // Stored as raw bytes: decode as UTF-8 and cache the String.
          java.lang.String s = ((com.google.protobuf.ByteString) ref)
              .toStringUtf8();
          codec_ = s;
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string codec = 3;</code>
       */
      public com.google.protobuf.ByteString
          getCodecBytes() {
        java.lang.Object ref = codec_;
        if (ref instanceof String) {
          // Stored as a String: encode to UTF-8 bytes and cache the result.
          com.google.protobuf.ByteString b = 
              com.google.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          codec_ = b;
          return b;
        } else {
          return (com.google.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string codec = 3;</code>
       */
      // NOTE: the irregular indentation in the null-check below is emitted
      // verbatim by protoc 2.5; it is intentional generated output.
      public Builder setCodec(
          java.lang.String value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000004;
        codec_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional string codec = 3;</code>
       */
      public Builder clearCodec() {
        bitField0_ = (bitField0_ & ~0x00000004);
        // Restore the field to the default instance's value (empty string).
        codec_ = getDefaultInstance().getCodec();
        onChanged();
        return this;
      }
      /**
       * <code>optional string codec = 3;</code>
       */
      public Builder setCodecBytes(
          com.google.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000004;
        codec_ = value;
        onChanged();
        return this;
      }
1483    
      // repeated .hadoop.hdfs.fsimage.FileSummary.Section sections = 4;
      // May alias an immutable list (from a merged message) until
      // ensureSectionsIsMutable() is called.
      private java.util.List<org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section> sections_ =
        java.util.Collections.emptyList();
      // Copy-on-write guard: bit 3 (0x08) of bitField0_ means this builder
      // privately owns sections_ and may mutate it; otherwise copy first.
      private void ensureSectionsIsMutable() {
        if (!((bitField0_ & 0x00000008) == 0x00000008)) {
          sections_ = new java.util.ArrayList<org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section>(sections_);
          bitField0_ |= 0x00000008;
         }
      }

      // When non-null, this builder supersedes sections_ as the source of
      // truth for the repeated field.
      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section, org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.SectionOrBuilder> sectionsBuilder_;

      /**
       * <code>repeated .hadoop.hdfs.fsimage.FileSummary.Section sections = 4;</code>
       */
      public java.util.List<org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section> getSectionsList() {
        if (sectionsBuilder_ == null) {
          return java.util.Collections.unmodifiableList(sections_);
        } else {
          return sectionsBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .hadoop.hdfs.fsimage.FileSummary.Section sections = 4;</code>
       */
      public int getSectionsCount() {
        if (sectionsBuilder_ == null) {
          return sections_.size();
        } else {
          return sectionsBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .hadoop.hdfs.fsimage.FileSummary.Section sections = 4;</code>
       */
      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section getSections(int index) {
        if (sectionsBuilder_ == null) {
          return sections_.get(index);
        } else {
          return sectionsBuilder_.getMessage(index);
        }
      }
1527          /**
1528           * <code>repeated .hadoop.hdfs.fsimage.FileSummary.Section sections = 4;</code>
1529           */
1530          public Builder setSections(
1531              int index, org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section value) {
1532            if (sectionsBuilder_ == null) {
1533              if (value == null) {
1534                throw new NullPointerException();
1535              }
1536              ensureSectionsIsMutable();
1537              sections_.set(index, value);
1538              onChanged();
1539            } else {
1540              sectionsBuilder_.setMessage(index, value);
1541            }
1542            return this;
1543          }
1544          /**
1545           * <code>repeated .hadoop.hdfs.fsimage.FileSummary.Section sections = 4;</code>
1546           */
1547          public Builder setSections(
1548              int index, org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section.Builder builderForValue) {
1549            if (sectionsBuilder_ == null) {
1550              ensureSectionsIsMutable();
1551              sections_.set(index, builderForValue.build());
1552              onChanged();
1553            } else {
1554              sectionsBuilder_.setMessage(index, builderForValue.build());
1555            }
1556            return this;
1557          }
1558          /**
1559           * <code>repeated .hadoop.hdfs.fsimage.FileSummary.Section sections = 4;</code>
1560           */
1561          public Builder addSections(org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section value) {
1562            if (sectionsBuilder_ == null) {
1563              if (value == null) {
1564                throw new NullPointerException();
1565              }
1566              ensureSectionsIsMutable();
1567              sections_.add(value);
1568              onChanged();
1569            } else {
1570              sectionsBuilder_.addMessage(value);
1571            }
1572            return this;
1573          }
1574          /**
1575           * <code>repeated .hadoop.hdfs.fsimage.FileSummary.Section sections = 4;</code>
1576           */
1577          public Builder addSections(
1578              int index, org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section value) {
1579            if (sectionsBuilder_ == null) {
1580              if (value == null) {
1581                throw new NullPointerException();
1582              }
1583              ensureSectionsIsMutable();
1584              sections_.add(index, value);
1585              onChanged();
1586            } else {
1587              sectionsBuilder_.addMessage(index, value);
1588            }
1589            return this;
1590          }
1591          /**
1592           * <code>repeated .hadoop.hdfs.fsimage.FileSummary.Section sections = 4;</code>
1593           */
1594          public Builder addSections(
1595              org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section.Builder builderForValue) {
1596            if (sectionsBuilder_ == null) {
1597              ensureSectionsIsMutable();
1598              sections_.add(builderForValue.build());
1599              onChanged();
1600            } else {
1601              sectionsBuilder_.addMessage(builderForValue.build());
1602            }
1603            return this;
1604          }
1605          /**
1606           * <code>repeated .hadoop.hdfs.fsimage.FileSummary.Section sections = 4;</code>
1607           */
1608          public Builder addSections(
1609              int index, org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section.Builder builderForValue) {
1610            if (sectionsBuilder_ == null) {
1611              ensureSectionsIsMutable();
1612              sections_.add(index, builderForValue.build());
1613              onChanged();
1614            } else {
1615              sectionsBuilder_.addMessage(index, builderForValue.build());
1616            }
1617            return this;
1618          }
1619          /**
1620           * <code>repeated .hadoop.hdfs.fsimage.FileSummary.Section sections = 4;</code>
1621           */
1622          public Builder addAllSections(
1623              java.lang.Iterable<? extends org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section> values) {
1624            if (sectionsBuilder_ == null) {
1625              ensureSectionsIsMutable();
1626              super.addAll(values, sections_);
1627              onChanged();
1628            } else {
1629              sectionsBuilder_.addAllMessages(values);
1630            }
1631            return this;
1632          }
1633          /**
1634           * <code>repeated .hadoop.hdfs.fsimage.FileSummary.Section sections = 4;</code>
1635           */
1636          public Builder clearSections() {
1637            if (sectionsBuilder_ == null) {
1638              sections_ = java.util.Collections.emptyList();
1639              bitField0_ = (bitField0_ & ~0x00000008);
1640              onChanged();
1641            } else {
1642              sectionsBuilder_.clear();
1643            }
1644            return this;
1645          }
1646          /**
1647           * <code>repeated .hadoop.hdfs.fsimage.FileSummary.Section sections = 4;</code>
1648           */
1649          public Builder removeSections(int index) {
1650            if (sectionsBuilder_ == null) {
1651              ensureSectionsIsMutable();
1652              sections_.remove(index);
1653              onChanged();
1654            } else {
1655              sectionsBuilder_.remove(index);
1656            }
1657            return this;
1658          }
1659          /**
1660           * <code>repeated .hadoop.hdfs.fsimage.FileSummary.Section sections = 4;</code>
1661           */
1662          public org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section.Builder getSectionsBuilder(
1663              int index) {
1664            return getSectionsFieldBuilder().getBuilder(index);
1665          }
1666          /**
1667           * <code>repeated .hadoop.hdfs.fsimage.FileSummary.Section sections = 4;</code>
1668           */
1669          public org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.SectionOrBuilder getSectionsOrBuilder(
1670              int index) {
1671            if (sectionsBuilder_ == null) {
1672              return sections_.get(index);  } else {
1673              return sectionsBuilder_.getMessageOrBuilder(index);
1674            }
1675          }
1676          /**
1677           * <code>repeated .hadoop.hdfs.fsimage.FileSummary.Section sections = 4;</code>
1678           */
1679          public java.util.List<? extends org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.SectionOrBuilder> 
1680               getSectionsOrBuilderList() {
1681            if (sectionsBuilder_ != null) {
1682              return sectionsBuilder_.getMessageOrBuilderList();
1683            } else {
1684              return java.util.Collections.unmodifiableList(sections_);
1685            }
1686          }
1687          /**
1688           * <code>repeated .hadoop.hdfs.fsimage.FileSummary.Section sections = 4;</code>
1689           */
1690          public org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section.Builder addSectionsBuilder() {
1691            return getSectionsFieldBuilder().addBuilder(
1692                org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section.getDefaultInstance());
1693          }
1694          /**
1695           * <code>repeated .hadoop.hdfs.fsimage.FileSummary.Section sections = 4;</code>
1696           */
1697          public org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section.Builder addSectionsBuilder(
1698              int index) {
1699            return getSectionsFieldBuilder().addBuilder(
1700                index, org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section.getDefaultInstance());
1701          }
1702          /**
1703           * <code>repeated .hadoop.hdfs.fsimage.FileSummary.Section sections = 4;</code>
1704           */
1705          public java.util.List<org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section.Builder> 
1706               getSectionsBuilderList() {
1707            return getSectionsFieldBuilder().getBuilderList();
1708          }
      // Lazily instantiates the RepeatedFieldBuilder, transferring ownership
      // of the current contents out of sections_ (which is then nulled so all
      // subsequent access goes through the builder).
      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section, org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.SectionOrBuilder> 
          getSectionsFieldBuilder() {
        if (sectionsBuilder_ == null) {
          sectionsBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
              org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section, org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.SectionOrBuilder>(
                  sections_,
                  ((bitField0_ & 0x00000008) == 0x00000008),
                  getParentForChildren(),
                  isClean());
          sections_ = null;
        }
        return sectionsBuilder_;
      }
1723    
1724          // @@protoc_insertion_point(builder_scope:hadoop.hdfs.fsimage.FileSummary)
1725        }
1726    
    static {
      // Eagerly create the shared FileSummary default instance;
      // initFields() applies the declared proto default values.
      defaultInstance = new FileSummary(true);
      defaultInstance.initFields();
    }
1731    
1732        // @@protoc_insertion_point(class_scope:hadoop.hdfs.fsimage.FileSummary)
1733      }
1734    
  /**
   * Accessor contract shared by {@code NameSystemSection} messages and their
   * builders: one {@code hasX()}/{@code getX()} pair per optional field.
   * Generated by protoc from {@code fsimage.proto}.
   */
  public interface NameSystemSectionOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // optional uint32 namespaceId = 1;
    /**
     * <code>optional uint32 namespaceId = 1;</code>
     */
    boolean hasNamespaceId();
    /**
     * <code>optional uint32 namespaceId = 1;</code>
     */
    int getNamespaceId();

    // optional uint64 genstampV1 = 2;
    // NOTE(review): V1/V2 presumably distinguish legacy vs. newer generation
    // stamp schemes — confirm against fsimage.proto before relying on this.
    /**
     * <code>optional uint64 genstampV1 = 2;</code>
     */
    boolean hasGenstampV1();
    /**
     * <code>optional uint64 genstampV1 = 2;</code>
     */
    long getGenstampV1();

    // optional uint64 genstampV2 = 3;
    /**
     * <code>optional uint64 genstampV2 = 3;</code>
     */
    boolean hasGenstampV2();
    /**
     * <code>optional uint64 genstampV2 = 3;</code>
     */
    long getGenstampV2();

    // optional uint64 genstampV1Limit = 4;
    /**
     * <code>optional uint64 genstampV1Limit = 4;</code>
     */
    boolean hasGenstampV1Limit();
    /**
     * <code>optional uint64 genstampV1Limit = 4;</code>
     */
    long getGenstampV1Limit();

    // optional uint64 lastAllocatedBlockId = 5;
    /**
     * <code>optional uint64 lastAllocatedBlockId = 5;</code>
     */
    boolean hasLastAllocatedBlockId();
    /**
     * <code>optional uint64 lastAllocatedBlockId = 5;</code>
     */
    long getLastAllocatedBlockId();

    // optional uint64 transactionId = 6;
    /**
     * <code>optional uint64 transactionId = 6;</code>
     */
    boolean hasTransactionId();
    /**
     * <code>optional uint64 transactionId = 6;</code>
     */
    long getTransactionId();

    // optional uint64 rollingUpgradeStartTime = 7;
    /**
     * <code>optional uint64 rollingUpgradeStartTime = 7;</code>
     */
    boolean hasRollingUpgradeStartTime();
    /**
     * <code>optional uint64 rollingUpgradeStartTime = 7;</code>
     */
    long getRollingUpgradeStartTime();
  }
1808      /**
1809       * Protobuf type {@code hadoop.hdfs.fsimage.NameSystemSection}
1810       *
1811       * <pre>
1812       **
1813       * Name: NS_INFO
1814       * </pre>
1815       */
1816      public static final class NameSystemSection extends
1817          com.google.protobuf.GeneratedMessage
1818          implements NameSystemSectionOrBuilder {
    // Use NameSystemSection.newBuilder() to construct.
    private NameSystemSection(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Lightweight constructor for the shared default instance: skips builder
    // plumbing and installs an empty unknown-field set.
    private NameSystemSection(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    // Shared immutable default instance (presumably assigned in a static
    // initializer later in this class, matching the FileSummary pattern
    // above — the block is outside this view).
    private static final NameSystemSection defaultInstance;
    public static NameSystemSection getDefaultInstance() {
      return defaultInstance;
    }

    public NameSystemSection getDefaultInstanceForType() {
      return defaultInstance;
    }
1834    
    // Fields seen on the wire that this schema version doesn't know are
    // retained here so reserialization round-trips them losslessly.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor used by PARSER: reads tag/value pairs
    // until tag 0 (end of stream/message). Unrecognized tags are preserved in
    // unknownFields. Each case label is (fieldNumber << 3) | wireType; all
    // fields of this message are varints (wire type 0). The 'default' arm
    // appearing before the field cases is protoc output; switch-case order
    // has no semantic effect.
    private NameSystemSection(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      // Unused for this message (no repeated fields); emitted by the
      // generator unconditionally.
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 8: {  // field 1: namespaceId (uint32)
              bitField0_ |= 0x00000001;
              namespaceId_ = input.readUInt32();
              break;
            }
            case 16: {  // field 2: genstampV1 (uint64)
              bitField0_ |= 0x00000002;
              genstampV1_ = input.readUInt64();
              break;
            }
            case 24: {  // field 3: genstampV2 (uint64)
              bitField0_ |= 0x00000004;
              genstampV2_ = input.readUInt64();
              break;
            }
            case 32: {  // field 4: genstampV1Limit (uint64)
              bitField0_ |= 0x00000008;
              genstampV1Limit_ = input.readUInt64();
              break;
            }
            case 40: {  // field 5: lastAllocatedBlockId (uint64)
              bitField0_ |= 0x00000010;
              lastAllocatedBlockId_ = input.readUInt64();
              break;
            }
            case 48: {  // field 6: transactionId (uint64)
              bitField0_ |= 0x00000020;
              transactionId_ = input.readUInt64();
              break;
            }
            case 56: {  // field 7: rollingUpgradeStartTime (uint64)
              bitField0_ |= 0x00000040;
              rollingUpgradeStartTime_ = input.readUInt64();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Runs even on error so a partially parsed message is well-formed.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    // Reflection support: descriptor and field-accessor table live as
    // internal statics on the enclosing FsImageProto class.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_NameSystemSection_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_NameSystemSection_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hdfs.server.namenode.FsImageProto.NameSystemSection.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.NameSystemSection.Builder.class);
    }
1922    
    // Stateless parser delegating to the wire-format constructor above.
    // (A non-final public PARSER field is protobuf 2.5-era generated output.)
    public static com.google.protobuf.Parser<NameSystemSection> PARSER =
        new com.google.protobuf.AbstractParser<NameSystemSection>() {
      public NameSystemSection parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new NameSystemSection(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<NameSystemSection> getParserForType() {
      return PARSER;
    }
1937    
    // Presence bits: one bit per optional field, assigned in declaration
    // order (namespaceId=0x01 ... rollingUpgradeStartTime=0x40).
    private int bitField0_;
    // optional uint32 namespaceId = 1;
    public static final int NAMESPACEID_FIELD_NUMBER = 1;
    private int namespaceId_;
    /**
     * <code>optional uint32 namespaceId = 1;</code>
     */
    public boolean hasNamespaceId() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>optional uint32 namespaceId = 1;</code>
     */
    public int getNamespaceId() {
      return namespaceId_;
    }

    // optional uint64 genstampV1 = 2;
    public static final int GENSTAMPV1_FIELD_NUMBER = 2;
    private long genstampV1_;
    /**
     * <code>optional uint64 genstampV1 = 2;</code>
     */
    public boolean hasGenstampV1() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>optional uint64 genstampV1 = 2;</code>
     */
    public long getGenstampV1() {
      return genstampV1_;
    }

    // optional uint64 genstampV2 = 3;
    public static final int GENSTAMPV2_FIELD_NUMBER = 3;
    private long genstampV2_;
    /**
     * <code>optional uint64 genstampV2 = 3;</code>
     */
    public boolean hasGenstampV2() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    /**
     * <code>optional uint64 genstampV2 = 3;</code>
     */
    public long getGenstampV2() {
      return genstampV2_;
    }

    // optional uint64 genstampV1Limit = 4;
    public static final int GENSTAMPV1LIMIT_FIELD_NUMBER = 4;
    private long genstampV1Limit_;
    /**
     * <code>optional uint64 genstampV1Limit = 4;</code>
     */
    public boolean hasGenstampV1Limit() {
      return ((bitField0_ & 0x00000008) == 0x00000008);
    }
    /**
     * <code>optional uint64 genstampV1Limit = 4;</code>
     */
    public long getGenstampV1Limit() {
      return genstampV1Limit_;
    }

    // optional uint64 lastAllocatedBlockId = 5;
    public static final int LASTALLOCATEDBLOCKID_FIELD_NUMBER = 5;
    private long lastAllocatedBlockId_;
    /**
     * <code>optional uint64 lastAllocatedBlockId = 5;</code>
     */
    public boolean hasLastAllocatedBlockId() {
      return ((bitField0_ & 0x00000010) == 0x00000010);
    }
    /**
     * <code>optional uint64 lastAllocatedBlockId = 5;</code>
     */
    public long getLastAllocatedBlockId() {
      return lastAllocatedBlockId_;
    }

    // optional uint64 transactionId = 6;
    public static final int TRANSACTIONID_FIELD_NUMBER = 6;
    private long transactionId_;
    /**
     * <code>optional uint64 transactionId = 6;</code>
     */
    public boolean hasTransactionId() {
      return ((bitField0_ & 0x00000020) == 0x00000020);
    }
    /**
     * <code>optional uint64 transactionId = 6;</code>
     */
    public long getTransactionId() {
      return transactionId_;
    }

    // optional uint64 rollingUpgradeStartTime = 7;
    public static final int ROLLINGUPGRADESTARTTIME_FIELD_NUMBER = 7;
    private long rollingUpgradeStartTime_;
    /**
     * <code>optional uint64 rollingUpgradeStartTime = 7;</code>
     */
    public boolean hasRollingUpgradeStartTime() {
      return ((bitField0_ & 0x00000040) == 0x00000040);
    }
    /**
     * <code>optional uint64 rollingUpgradeStartTime = 7;</code>
     */
    public long getRollingUpgradeStartTime() {
      return rollingUpgradeStartTime_;
    }
2050    
    // Resets every field to its declared proto default (all zero here).
    private void initFields() {
      namespaceId_ = 0;
      genstampV1_ = 0L;
      genstampV2_ = 0L;
      genstampV1Limit_ = 0L;
      lastAllocatedBlockId_ = 0L;
      transactionId_ = 0L;
      rollingUpgradeStartTime_ = 0L;
    }
    // Memoized init check: -1 = not yet computed, 1 = initialized. Always
    // true for this message since every field is optional (no required-field
    // checks are generated).
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      memoizedIsInitialized = 1;
      return true;
    }
2068    
    // Serializes only fields whose presence bit is set, in field-number
    // order, followed by any preserved unknown fields. The generator calls
    // getSerializedSize() first for its size-memoization side effect.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeUInt32(1, namespaceId_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeUInt64(2, genstampV1_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeUInt64(3, genstampV2_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        output.writeUInt64(4, genstampV1Limit_);
      }
      if (((bitField0_ & 0x00000010) == 0x00000010)) {
        output.writeUInt64(5, lastAllocatedBlockId_);
      }
      if (((bitField0_ & 0x00000020) == 0x00000020)) {
        output.writeUInt64(6, transactionId_);
      }
      if (((bitField0_ & 0x00000040) == 0x00000040)) {
        output.writeUInt64(7, rollingUpgradeStartTime_);
      }
      getUnknownFields().writeTo(output);
    }
2095    
    // Memoized encoded size (-1 = not yet computed). Sums the tag+varint
    // size of each present field plus any unknown fields; mirrors writeTo.
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt32Size(1, namespaceId_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(2, genstampV1_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(3, genstampV2_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(4, genstampV1Limit_);
      }
      if (((bitField0_ & 0x00000010) == 0x00000010)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(5, lastAllocatedBlockId_);
      }
      if (((bitField0_ & 0x00000020) == 0x00000020)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(6, transactionId_);
      }
      if (((bitField0_ & 0x00000040) == 0x00000040)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(7, rollingUpgradeStartTime_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
2134    
    // Java serialization support: delegates to GeneratedMessage's
    // serialization-proxy mechanism rather than serializing fields directly.
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
2141    
    // Static parse entry points — all delegate to PARSER. The
    // ExtensionRegistryLite overloads only matter for messages carrying
    // extensions; the delimited variants read a varint length prefix first.
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.NameSystemSection parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.NameSystemSection parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.NameSystemSection parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.NameSystemSection parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.NameSystemSection parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.NameSystemSection parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.NameSystemSection parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.NameSystemSection parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.NameSystemSection parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.NameSystemSection parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
2194    
    // Builder factories: newBuilder() for an empty builder, the prototype
    // overload / toBuilder() for a builder pre-populated via mergeFrom.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hdfs.server.namenode.FsImageProto.NameSystemSection prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    // Framework hook: creates a builder wired to a parent for nested-builder
    // change notification.
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
2208        /**
2209         * Protobuf type {@code hadoop.hdfs.fsimage.NameSystemSection}
2210         *
2211         * <pre>
2212         **
2213         * Name: NS_INFO
2214         * </pre>
2215         */
2216        public static final class Builder extends
2217            com.google.protobuf.GeneratedMessage.Builder<Builder>
2218           implements org.apache.hadoop.hdfs.server.namenode.FsImageProto.NameSystemSectionOrBuilder {
      // Reflection plumbing: same descriptor/accessor table as the message.
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_NameSystemSection_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_NameSystemSection_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hdfs.server.namenode.FsImageProto.NameSystemSection.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.NameSystemSection.Builder.class);
      }

      // Construct using org.apache.hadoop.hdfs.server.namenode.FsImageProto.NameSystemSection.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // Empty here: this message has no sub-message fields whose builders
      // would need eager creation under alwaysUseFieldBuilders.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }
2248    
2249          public Builder clear() {
2250            super.clear();
2251            namespaceId_ = 0;
2252            bitField0_ = (bitField0_ & ~0x00000001);
2253            genstampV1_ = 0L;
2254            bitField0_ = (bitField0_ & ~0x00000002);
2255            genstampV2_ = 0L;
2256            bitField0_ = (bitField0_ & ~0x00000004);
2257            genstampV1Limit_ = 0L;
2258            bitField0_ = (bitField0_ & ~0x00000008);
2259            lastAllocatedBlockId_ = 0L;
2260            bitField0_ = (bitField0_ & ~0x00000010);
2261            transactionId_ = 0L;
2262            bitField0_ = (bitField0_ & ~0x00000020);
2263            rollingUpgradeStartTime_ = 0L;
2264            bitField0_ = (bitField0_ & ~0x00000040);
2265            return this;
2266          }
2267    
2268          public Builder clone() {
2269            return create().mergeFrom(buildPartial());
2270          }
2271    
2272          public com.google.protobuf.Descriptors.Descriptor
2273              getDescriptorForType() {
2274            return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_NameSystemSection_descriptor;
2275          }
2276    
2277          public org.apache.hadoop.hdfs.server.namenode.FsImageProto.NameSystemSection getDefaultInstanceForType() {
2278            return org.apache.hadoop.hdfs.server.namenode.FsImageProto.NameSystemSection.getDefaultInstance();
2279          }
2280    
2281          public org.apache.hadoop.hdfs.server.namenode.FsImageProto.NameSystemSection build() {
2282            org.apache.hadoop.hdfs.server.namenode.FsImageProto.NameSystemSection result = buildPartial();
2283            if (!result.isInitialized()) {
2284              throw newUninitializedMessageException(result);
2285            }
2286            return result;
2287          }
2288    
2289          public org.apache.hadoop.hdfs.server.namenode.FsImageProto.NameSystemSection buildPartial() {
2290            org.apache.hadoop.hdfs.server.namenode.FsImageProto.NameSystemSection result = new org.apache.hadoop.hdfs.server.namenode.FsImageProto.NameSystemSection(this);
2291            int from_bitField0_ = bitField0_;
2292            int to_bitField0_ = 0;
2293            if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
2294              to_bitField0_ |= 0x00000001;
2295            }
2296            result.namespaceId_ = namespaceId_;
2297            if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
2298              to_bitField0_ |= 0x00000002;
2299            }
2300            result.genstampV1_ = genstampV1_;
2301            if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
2302              to_bitField0_ |= 0x00000004;
2303            }
2304            result.genstampV2_ = genstampV2_;
2305            if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
2306              to_bitField0_ |= 0x00000008;
2307            }
2308            result.genstampV1Limit_ = genstampV1Limit_;
2309            if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
2310              to_bitField0_ |= 0x00000010;
2311            }
2312            result.lastAllocatedBlockId_ = lastAllocatedBlockId_;
2313            if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
2314              to_bitField0_ |= 0x00000020;
2315            }
2316            result.transactionId_ = transactionId_;
2317            if (((from_bitField0_ & 0x00000040) == 0x00000040)) {
2318              to_bitField0_ |= 0x00000040;
2319            }
2320            result.rollingUpgradeStartTime_ = rollingUpgradeStartTime_;
2321            result.bitField0_ = to_bitField0_;
2322            onBuilt();
2323            return result;
2324          }
2325    
2326          public Builder mergeFrom(com.google.protobuf.Message other) {
2327            if (other instanceof org.apache.hadoop.hdfs.server.namenode.FsImageProto.NameSystemSection) {
2328              return mergeFrom((org.apache.hadoop.hdfs.server.namenode.FsImageProto.NameSystemSection)other);
2329            } else {
2330              super.mergeFrom(other);
2331              return this;
2332            }
2333          }
2334    
2335          public Builder mergeFrom(org.apache.hadoop.hdfs.server.namenode.FsImageProto.NameSystemSection other) {
2336            if (other == org.apache.hadoop.hdfs.server.namenode.FsImageProto.NameSystemSection.getDefaultInstance()) return this;
2337            if (other.hasNamespaceId()) {
2338              setNamespaceId(other.getNamespaceId());
2339            }
2340            if (other.hasGenstampV1()) {
2341              setGenstampV1(other.getGenstampV1());
2342            }
2343            if (other.hasGenstampV2()) {
2344              setGenstampV2(other.getGenstampV2());
2345            }
2346            if (other.hasGenstampV1Limit()) {
2347              setGenstampV1Limit(other.getGenstampV1Limit());
2348            }
2349            if (other.hasLastAllocatedBlockId()) {
2350              setLastAllocatedBlockId(other.getLastAllocatedBlockId());
2351            }
2352            if (other.hasTransactionId()) {
2353              setTransactionId(other.getTransactionId());
2354            }
2355            if (other.hasRollingUpgradeStartTime()) {
2356              setRollingUpgradeStartTime(other.getRollingUpgradeStartTime());
2357            }
2358            this.mergeUnknownFields(other.getUnknownFields());
2359            return this;
2360          }
2361    
2362          public final boolean isInitialized() {
2363            return true;
2364          }
2365    
2366          public Builder mergeFrom(
2367              com.google.protobuf.CodedInputStream input,
2368              com.google.protobuf.ExtensionRegistryLite extensionRegistry)
2369              throws java.io.IOException {
2370            org.apache.hadoop.hdfs.server.namenode.FsImageProto.NameSystemSection parsedMessage = null;
2371            try {
2372              parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
2373            } catch (com.google.protobuf.InvalidProtocolBufferException e) {
2374              parsedMessage = (org.apache.hadoop.hdfs.server.namenode.FsImageProto.NameSystemSection) e.getUnfinishedMessage();
2375              throw e;
2376            } finally {
2377              if (parsedMessage != null) {
2378                mergeFrom(parsedMessage);
2379              }
2380            }
2381            return this;
2382          }
2383          private int bitField0_;
2384    
2385          // optional uint32 namespaceId = 1;
2386          private int namespaceId_ ;
2387          /**
2388           * <code>optional uint32 namespaceId = 1;</code>
2389           */
2390          public boolean hasNamespaceId() {
2391            return ((bitField0_ & 0x00000001) == 0x00000001);
2392          }
2393          /**
2394           * <code>optional uint32 namespaceId = 1;</code>
2395           */
2396          public int getNamespaceId() {
2397            return namespaceId_;
2398          }
2399          /**
2400           * <code>optional uint32 namespaceId = 1;</code>
2401           */
2402          public Builder setNamespaceId(int value) {
2403            bitField0_ |= 0x00000001;
2404            namespaceId_ = value;
2405            onChanged();
2406            return this;
2407          }
2408          /**
2409           * <code>optional uint32 namespaceId = 1;</code>
2410           */
2411          public Builder clearNamespaceId() {
2412            bitField0_ = (bitField0_ & ~0x00000001);
2413            namespaceId_ = 0;
2414            onChanged();
2415            return this;
2416          }
2417    
2418          // optional uint64 genstampV1 = 2;
2419          private long genstampV1_ ;
2420          /**
2421           * <code>optional uint64 genstampV1 = 2;</code>
2422           */
2423          public boolean hasGenstampV1() {
2424            return ((bitField0_ & 0x00000002) == 0x00000002);
2425          }
2426          /**
2427           * <code>optional uint64 genstampV1 = 2;</code>
2428           */
2429          public long getGenstampV1() {
2430            return genstampV1_;
2431          }
2432          /**
2433           * <code>optional uint64 genstampV1 = 2;</code>
2434           */
2435          public Builder setGenstampV1(long value) {
2436            bitField0_ |= 0x00000002;
2437            genstampV1_ = value;
2438            onChanged();
2439            return this;
2440          }
2441          /**
2442           * <code>optional uint64 genstampV1 = 2;</code>
2443           */
2444          public Builder clearGenstampV1() {
2445            bitField0_ = (bitField0_ & ~0x00000002);
2446            genstampV1_ = 0L;
2447            onChanged();
2448            return this;
2449          }
2450    
2451          // optional uint64 genstampV2 = 3;
2452          private long genstampV2_ ;
2453          /**
2454           * <code>optional uint64 genstampV2 = 3;</code>
2455           */
2456          public boolean hasGenstampV2() {
2457            return ((bitField0_ & 0x00000004) == 0x00000004);
2458          }
2459          /**
2460           * <code>optional uint64 genstampV2 = 3;</code>
2461           */
2462          public long getGenstampV2() {
2463            return genstampV2_;
2464          }
2465          /**
2466           * <code>optional uint64 genstampV2 = 3;</code>
2467           */
2468          public Builder setGenstampV2(long value) {
2469            bitField0_ |= 0x00000004;
2470            genstampV2_ = value;
2471            onChanged();
2472            return this;
2473          }
2474          /**
2475           * <code>optional uint64 genstampV2 = 3;</code>
2476           */
2477          public Builder clearGenstampV2() {
2478            bitField0_ = (bitField0_ & ~0x00000004);
2479            genstampV2_ = 0L;
2480            onChanged();
2481            return this;
2482          }
2483    
2484          // optional uint64 genstampV1Limit = 4;
2485          private long genstampV1Limit_ ;
2486          /**
2487           * <code>optional uint64 genstampV1Limit = 4;</code>
2488           */
2489          public boolean hasGenstampV1Limit() {
2490            return ((bitField0_ & 0x00000008) == 0x00000008);
2491          }
2492          /**
2493           * <code>optional uint64 genstampV1Limit = 4;</code>
2494           */
2495          public long getGenstampV1Limit() {
2496            return genstampV1Limit_;
2497          }
2498          /**
2499           * <code>optional uint64 genstampV1Limit = 4;</code>
2500           */
2501          public Builder setGenstampV1Limit(long value) {
2502            bitField0_ |= 0x00000008;
2503            genstampV1Limit_ = value;
2504            onChanged();
2505            return this;
2506          }
2507          /**
2508           * <code>optional uint64 genstampV1Limit = 4;</code>
2509           */
2510          public Builder clearGenstampV1Limit() {
2511            bitField0_ = (bitField0_ & ~0x00000008);
2512            genstampV1Limit_ = 0L;
2513            onChanged();
2514            return this;
2515          }
2516    
2517          // optional uint64 lastAllocatedBlockId = 5;
2518          private long lastAllocatedBlockId_ ;
2519          /**
2520           * <code>optional uint64 lastAllocatedBlockId = 5;</code>
2521           */
2522          public boolean hasLastAllocatedBlockId() {
2523            return ((bitField0_ & 0x00000010) == 0x00000010);
2524          }
2525          /**
2526           * <code>optional uint64 lastAllocatedBlockId = 5;</code>
2527           */
2528          public long getLastAllocatedBlockId() {
2529            return lastAllocatedBlockId_;
2530          }
2531          /**
2532           * <code>optional uint64 lastAllocatedBlockId = 5;</code>
2533           */
2534          public Builder setLastAllocatedBlockId(long value) {
2535            bitField0_ |= 0x00000010;
2536            lastAllocatedBlockId_ = value;
2537            onChanged();
2538            return this;
2539          }
2540          /**
2541           * <code>optional uint64 lastAllocatedBlockId = 5;</code>
2542           */
2543          public Builder clearLastAllocatedBlockId() {
2544            bitField0_ = (bitField0_ & ~0x00000010);
2545            lastAllocatedBlockId_ = 0L;
2546            onChanged();
2547            return this;
2548          }
2549    
2550          // optional uint64 transactionId = 6;
2551          private long transactionId_ ;
2552          /**
2553           * <code>optional uint64 transactionId = 6;</code>
2554           */
2555          public boolean hasTransactionId() {
2556            return ((bitField0_ & 0x00000020) == 0x00000020);
2557          }
2558          /**
2559           * <code>optional uint64 transactionId = 6;</code>
2560           */
2561          public long getTransactionId() {
2562            return transactionId_;
2563          }
2564          /**
2565           * <code>optional uint64 transactionId = 6;</code>
2566           */
2567          public Builder setTransactionId(long value) {
2568            bitField0_ |= 0x00000020;
2569            transactionId_ = value;
2570            onChanged();
2571            return this;
2572          }
2573          /**
2574           * <code>optional uint64 transactionId = 6;</code>
2575           */
2576          public Builder clearTransactionId() {
2577            bitField0_ = (bitField0_ & ~0x00000020);
2578            transactionId_ = 0L;
2579            onChanged();
2580            return this;
2581          }
2582    
2583          // optional uint64 rollingUpgradeStartTime = 7;
2584          private long rollingUpgradeStartTime_ ;
2585          /**
2586           * <code>optional uint64 rollingUpgradeStartTime = 7;</code>
2587           */
2588          public boolean hasRollingUpgradeStartTime() {
2589            return ((bitField0_ & 0x00000040) == 0x00000040);
2590          }
2591          /**
2592           * <code>optional uint64 rollingUpgradeStartTime = 7;</code>
2593           */
2594          public long getRollingUpgradeStartTime() {
2595            return rollingUpgradeStartTime_;
2596          }
2597          /**
2598           * <code>optional uint64 rollingUpgradeStartTime = 7;</code>
2599           */
2600          public Builder setRollingUpgradeStartTime(long value) {
2601            bitField0_ |= 0x00000040;
2602            rollingUpgradeStartTime_ = value;
2603            onChanged();
2604            return this;
2605          }
2606          /**
2607           * <code>optional uint64 rollingUpgradeStartTime = 7;</code>
2608           */
2609          public Builder clearRollingUpgradeStartTime() {
2610            bitField0_ = (bitField0_ & ~0x00000040);
2611            rollingUpgradeStartTime_ = 0L;
2612            onChanged();
2613            return this;
2614          }
2615    
2616          // @@protoc_insertion_point(builder_scope:hadoop.hdfs.fsimage.NameSystemSection)
2617        }
2618    
    // Eagerly builds the shared singleton default instance and initializes
    // its fields to their proto defaults.
    static {
      defaultInstance = new NameSystemSection(true);
      defaultInstance.initFields();
    }
2623    
2624        // @@protoc_insertion_point(class_scope:hadoop.hdfs.fsimage.NameSystemSection)
2625      }
2626    
  /**
   * Read-only accessor interface implemented by both {@code INodeSection}
   * and its {@code Builder}.
   */
  public interface INodeSectionOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // optional uint64 lastInodeId = 1;
    /**
     * <code>optional uint64 lastInodeId = 1;</code>
     */
    boolean hasLastInodeId();
    /**
     * <code>optional uint64 lastInodeId = 1;</code>
     */
    long getLastInodeId();

    // optional uint64 numInodes = 2;
    /**
     * <code>optional uint64 numInodes = 2;</code>
     *
     * <pre>
     * repeated INodes..
     * </pre>
     */
    boolean hasNumInodes();
    /**
     * <code>optional uint64 numInodes = 2;</code>
     *
     * <pre>
     * repeated INodes..
     * </pre>
     */
    long getNumInodes();
  }
2658      /**
2659       * Protobuf type {@code hadoop.hdfs.fsimage.INodeSection}
2660       *
2661       * <pre>
2662       **
2663       * Permission is serialized as a 64-bit long. [0:24):[25:48):[48:64) (in Big Endian).
2664       * The first and the second parts are the string ids of the user and
2665       * group name, and the last 16 bits are the permission bits.
2666       *
2667       * Name: INODE
2668       * </pre>
2669       */
2670      public static final class INodeSection extends
2671          com.google.protobuf.GeneratedMessage
2672          implements INodeSectionOrBuilder {
    // Use INodeSection.newBuilder() to construct.
    // Copies the builder's unknown fields into the finished message.
    private INodeSection(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Lightweight constructor used for the shared default instance; only sets unknownFields.
    private INodeSection(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
2679    
    // Singleton default instance, created in the class's static initializer.
    private static final INodeSection defaultInstance;
    public static INodeSection getDefaultInstance() {
      return defaultInstance;
    }

    public INodeSection getDefaultInstanceForType() {
      return defaultInstance;
    }
2688    
    // Fields parsed from the wire that are not known to this generated schema.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor: reads tag/value pairs until tag 0
    // (end of stream/message). Unrecognized tags are preserved in unknownFields.
    private INodeSection(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 8: {
              // Field 1 (lastInodeId), varint wire type.
              bitField0_ |= 0x00000001;
              lastInodeId_ = input.readUInt64();
              break;
            }
            case 16: {
              // Field 2 (numInodes), varint wire type.
              bitField0_ |= 0x00000002;
              numInodes_ = input.readUInt64();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Always capture whatever unknown fields were read before an error.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    // Descriptor for the INodeSection message type.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_descriptor;
    }
2744    
    // Reflection support: maps descriptor fields to the generated accessors.
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.Builder.class);
    }
2751    
    // Shared parser instance; delegates to the parsing constructor above.
    public static com.google.protobuf.Parser<INodeSection> PARSER =
        new com.google.protobuf.AbstractParser<INodeSection>() {
      public INodeSection parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new INodeSection(input, extensionRegistry);
      }
    };
2761    
    // Exposes the shared PARSER to the protobuf runtime.
    @java.lang.Override
    public com.google.protobuf.Parser<INodeSection> getParserForType() {
      return PARSER;
    }
2766    
    /**
     * Read-only accessor interface implemented by both
     * {@code FileUnderConstructionFeature} and its {@code Builder}.
     */
    public interface FileUnderConstructionFeatureOrBuilder
        extends com.google.protobuf.MessageOrBuilder {

      // optional string clientName = 1;
      /**
       * <code>optional string clientName = 1;</code>
       */
      boolean hasClientName();
      /**
       * <code>optional string clientName = 1;</code>
       */
      java.lang.String getClientName();
      /**
       * <code>optional string clientName = 1;</code>
       */
      com.google.protobuf.ByteString
          getClientNameBytes();

      // optional string clientMachine = 2;
      /**
       * <code>optional string clientMachine = 2;</code>
       */
      boolean hasClientMachine();
      /**
       * <code>optional string clientMachine = 2;</code>
       */
      java.lang.String getClientMachine();
      /**
       * <code>optional string clientMachine = 2;</code>
       */
      com.google.protobuf.ByteString
          getClientMachineBytes();
    }
2800        /**
2801         * Protobuf type {@code hadoop.hdfs.fsimage.INodeSection.FileUnderConstructionFeature}
2802         *
2803         * <pre>
2804         **
2805         * under-construction feature for INodeFile
2806         * </pre>
2807         */
2808        public static final class FileUnderConstructionFeature extends
2809            com.google.protobuf.GeneratedMessage
2810            implements FileUnderConstructionFeatureOrBuilder {
      // Use FileUnderConstructionFeature.newBuilder() to construct.
      // Copies the builder's unknown fields into the finished message.
      private FileUnderConstructionFeature(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
        super(builder);
        this.unknownFields = builder.getUnknownFields();
      }
      // Lightweight constructor used for the shared default instance; only sets unknownFields.
      private FileUnderConstructionFeature(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
2817    
      // Singleton default instance, created in the class's static initializer.
      private static final FileUnderConstructionFeature defaultInstance;
      public static FileUnderConstructionFeature getDefaultInstance() {
        return defaultInstance;
      }
2822    
      // Instance-level access to the shared default instance.
      public FileUnderConstructionFeature getDefaultInstanceForType() {
        return defaultInstance;
      }
2826    
      // Fields parsed from the wire that are not known to this generated schema.
      private final com.google.protobuf.UnknownFieldSet unknownFields;
      @java.lang.Override
      public final com.google.protobuf.UnknownFieldSet
          getUnknownFields() {
        return this.unknownFields;
      }
      // Wire-format parsing constructor: reads tag/value pairs until tag 0
      // (end of stream/message). Unrecognized tags are preserved in unknownFields.
      private FileUnderConstructionFeature(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        initFields();
        int mutable_bitField0_ = 0;
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
            com.google.protobuf.UnknownFieldSet.newBuilder();
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              default: {
                if (!parseUnknownField(input, unknownFields,
                                       extensionRegistry, tag)) {
                  done = true;
                }
                break;
              }
              case 10: {
                // Field 1 (clientName), length-delimited wire type; kept as
                // raw bytes and lazily decoded to String by getClientName().
                bitField0_ |= 0x00000001;
                clientName_ = input.readBytes();
                break;
              }
              case 18: {
                // Field 2 (clientMachine), length-delimited wire type.
                bitField0_ |= 0x00000002;
                clientMachine_ = input.readBytes();
                break;
              }
            }
          }
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(this);
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(
              e.getMessage()).setUnfinishedMessage(this);
        } finally {
          // Always capture whatever unknown fields were read before an error.
          this.unknownFields = unknownFields.build();
          makeExtensionsImmutable();
        }
      }
      // Descriptor for the FileUnderConstructionFeature message type.
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_FileUnderConstructionFeature_descriptor;
      }
2882    
      // Reflection support: maps descriptor fields to the generated accessors.
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_FileUnderConstructionFeature_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature.Builder.class);
      }
2889    
      // Shared parser instance; delegates to the parsing constructor above.
      public static com.google.protobuf.Parser<FileUnderConstructionFeature> PARSER =
          new com.google.protobuf.AbstractParser<FileUnderConstructionFeature>() {
        public FileUnderConstructionFeature parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          return new FileUnderConstructionFeature(input, extensionRegistry);
        }
      };
2899    
      // Exposes the shared PARSER to the protobuf runtime.
      @java.lang.Override
      public com.google.protobuf.Parser<FileUnderConstructionFeature> getParserForType() {
        return PARSER;
      }
2904    
      // Presence bits: bit 0 = clientName, bit 1 = clientMachine.
      private int bitField0_;
      // optional string clientName = 1;
      public static final int CLIENTNAME_FIELD_NUMBER = 1;
      // Holds either a String or a ByteString; decoded lazily (see getClientName()).
      private java.lang.Object clientName_;
      /**
       * <code>optional string clientName = 1;</code>
       */
      public boolean hasClientName() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>optional string clientName = 1;</code>
       */
      public java.lang.String getClientName() {
        java.lang.Object ref = clientName_;
        if (ref instanceof java.lang.String) {
          return (java.lang.String) ref;
        } else {
          com.google.protobuf.ByteString bs = 
              (com.google.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          // Cache the decoded String only when the bytes were valid UTF-8.
          if (bs.isValidUtf8()) {
            clientName_ = s;
          }
          return s;
        }
      }
      /**
       * <code>optional string clientName = 1;</code>
       */
      public com.google.protobuf.ByteString
          getClientNameBytes() {
        java.lang.Object ref = clientName_;
        if (ref instanceof java.lang.String) {
          com.google.protobuf.ByteString b = 
              com.google.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          // Cache the encoded bytes for subsequent calls.
          clientName_ = b;
          return b;
        } else {
          return (com.google.protobuf.ByteString) ref;
        }
      }
2948    
      // optional string clientMachine = 2;
      public static final int CLIENTMACHINE_FIELD_NUMBER = 2;
      // Holds either a String or a ByteString; decoded lazily (see getClientMachine()).
      private java.lang.Object clientMachine_;
      /**
       * <code>optional string clientMachine = 2;</code>
       */
      public boolean hasClientMachine() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>optional string clientMachine = 2;</code>
       */
      public java.lang.String getClientMachine() {
        java.lang.Object ref = clientMachine_;
        if (ref instanceof java.lang.String) {
          return (java.lang.String) ref;
        } else {
          com.google.protobuf.ByteString bs = 
              (com.google.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          // Cache the decoded String only when the bytes were valid UTF-8.
          if (bs.isValidUtf8()) {
            clientMachine_ = s;
          }
          return s;
        }
      }
      /**
       * <code>optional string clientMachine = 2;</code>
       */
      public com.google.protobuf.ByteString
          getClientMachineBytes() {
        java.lang.Object ref = clientMachine_;
        if (ref instanceof java.lang.String) {
          com.google.protobuf.ByteString b = 
              com.google.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          // Cache the encoded bytes for subsequent calls.
          clientMachine_ = b;
          return b;
        } else {
          return (com.google.protobuf.ByteString) ref;
        }
      }
2991    
      // Sets both fields to their proto defaults (empty string).
      private void initFields() {
        clientName_ = "";
        clientMachine_ = "";
      }
      // Memoized initialization check: -1 = unknown, 1 = initialized.
      private byte memoizedIsInitialized = -1;
      // Both fields are optional, so any instance is initialized.
      public final boolean isInitialized() {
        byte isInitialized = memoizedIsInitialized;
        if (isInitialized != -1) return isInitialized == 1;

        memoizedIsInitialized = 1;
        return true;
      }
3004    
3005          public void writeTo(com.google.protobuf.CodedOutputStream output)
3006                              throws java.io.IOException {
3007            getSerializedSize();
3008            if (((bitField0_ & 0x00000001) == 0x00000001)) {
3009              output.writeBytes(1, getClientNameBytes());
3010            }
3011            if (((bitField0_ & 0x00000002) == 0x00000002)) {
3012              output.writeBytes(2, getClientMachineBytes());
3013            }
3014            getUnknownFields().writeTo(output);
3015          }
3016    
3017          private int memoizedSerializedSize = -1;
3018          public int getSerializedSize() {
3019            int size = memoizedSerializedSize;
3020            if (size != -1) return size;
3021    
3022            size = 0;
3023            if (((bitField0_ & 0x00000001) == 0x00000001)) {
3024              size += com.google.protobuf.CodedOutputStream
3025                .computeBytesSize(1, getClientNameBytes());
3026            }
3027            if (((bitField0_ & 0x00000002) == 0x00000002)) {
3028              size += com.google.protobuf.CodedOutputStream
3029                .computeBytesSize(2, getClientMachineBytes());
3030            }
3031            size += getUnknownFields().getSerializedSize();
3032            memoizedSerializedSize = size;
3033            return size;
3034          }
3035    
      private static final long serialVersionUID = 0L;
      // Java serialization hook; delegates to GeneratedMessage's proxy form.
      @java.lang.Override
      protected java.lang.Object writeReplace()
          throws java.io.ObjectStreamException {
        return super.writeReplace();
      }

      // Static parse entry points. All overloads delegate to PARSER, which in
      // turn drives this message's stream-parsing constructor.
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature parseFrom(
          com.google.protobuf.ByteString data)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature parseFrom(
          com.google.protobuf.ByteString data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature parseFrom(byte[] data)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature parseFrom(
          byte[] data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature parseFrom(java.io.InputStream input)
          throws java.io.IOException {
        return PARSER.parseFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature parseFrom(
          java.io.InputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseFrom(input, extensionRegistry);
      }
      // Delimited variants read a leading varint length before the message.
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature parseDelimitedFrom(java.io.InputStream input)
          throws java.io.IOException {
        return PARSER.parseDelimitedFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature parseDelimitedFrom(
          java.io.InputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseDelimitedFrom(input, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature parseFrom(
          com.google.protobuf.CodedInputStream input)
          throws java.io.IOException {
        return PARSER.parseFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature parseFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseFrom(input, extensionRegistry);
      }

      // Builder factory methods; newBuilder(prototype) seeds the builder with
      // a copy of the prototype's set fields.
      public static Builder newBuilder() { return Builder.create(); }
      public Builder newBuilderForType() { return newBuilder(); }
      public static Builder newBuilder(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature prototype) {
        return newBuilder().mergeFrom(prototype);
      }
      public Builder toBuilder() { return newBuilder(this); }
3102    
3103          @java.lang.Override
3104          protected Builder newBuilderForType(
3105              com.google.protobuf.GeneratedMessage.BuilderParent parent) {
3106            Builder builder = new Builder(parent);
3107            return builder;
3108          }
3109          /**
3110           * Protobuf type {@code hadoop.hdfs.fsimage.INodeSection.FileUnderConstructionFeature}
3111           *
3112           * <pre>
3113           **
3114           * under-construction feature for INodeFile
3115           * </pre>
3116           */
3117          public static final class Builder extends
3118              com.google.protobuf.GeneratedMessage.Builder<Builder>
3119             implements org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeatureOrBuilder {
3120            public static final com.google.protobuf.Descriptors.Descriptor
3121                getDescriptor() {
3122              return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_FileUnderConstructionFeature_descriptor;
3123            }
3124    
3125            protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
3126                internalGetFieldAccessorTable() {
3127              return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_FileUnderConstructionFeature_fieldAccessorTable
3128                  .ensureFieldAccessorsInitialized(
3129                      org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature.Builder.class);
3130            }
3131    
3132            // Construct using org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature.newBuilder()
3133            private Builder() {
3134              maybeForceBuilderInitialization();
3135            }
3136    
3137            private Builder(
3138                com.google.protobuf.GeneratedMessage.BuilderParent parent) {
3139              super(parent);
3140              maybeForceBuilderInitialization();
3141            }
3142            private void maybeForceBuilderInitialization() {
3143              if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
3144              }
3145            }
3146            private static Builder create() {
3147              return new Builder();
3148            }
3149    
3150            public Builder clear() {
3151              super.clear();
3152              clientName_ = "";
3153              bitField0_ = (bitField0_ & ~0x00000001);
3154              clientMachine_ = "";
3155              bitField0_ = (bitField0_ & ~0x00000002);
3156              return this;
3157            }
3158    
3159            public Builder clone() {
3160              return create().mergeFrom(buildPartial());
3161            }
3162    
3163            public com.google.protobuf.Descriptors.Descriptor
3164                getDescriptorForType() {
3165              return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_FileUnderConstructionFeature_descriptor;
3166            }
3167    
3168            public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature getDefaultInstanceForType() {
3169              return org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature.getDefaultInstance();
3170            }
3171    
3172            public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature build() {
3173              org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature result = buildPartial();
3174              if (!result.isInitialized()) {
3175                throw newUninitializedMessageException(result);
3176              }
3177              return result;
3178            }
3179    
3180            public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature buildPartial() {
3181              org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature result = new org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature(this);
3182              int from_bitField0_ = bitField0_;
3183              int to_bitField0_ = 0;
3184              if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
3185                to_bitField0_ |= 0x00000001;
3186              }
3187              result.clientName_ = clientName_;
3188              if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
3189                to_bitField0_ |= 0x00000002;
3190              }
3191              result.clientMachine_ = clientMachine_;
3192              result.bitField0_ = to_bitField0_;
3193              onBuilt();
3194              return result;
3195            }
3196    
3197            public Builder mergeFrom(com.google.protobuf.Message other) {
3198              if (other instanceof org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature) {
3199                return mergeFrom((org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature)other);
3200              } else {
3201                super.mergeFrom(other);
3202                return this;
3203              }
3204            }
3205    
3206            public Builder mergeFrom(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature other) {
3207              if (other == org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature.getDefaultInstance()) return this;
3208              if (other.hasClientName()) {
3209                bitField0_ |= 0x00000001;
3210                clientName_ = other.clientName_;
3211                onChanged();
3212              }
3213              if (other.hasClientMachine()) {
3214                bitField0_ |= 0x00000002;
3215                clientMachine_ = other.clientMachine_;
3216                onChanged();
3217              }
3218              this.mergeUnknownFields(other.getUnknownFields());
3219              return this;
3220            }
3221    
3222            public final boolean isInitialized() {
3223              return true;
3224            }
3225    
3226            public Builder mergeFrom(
3227                com.google.protobuf.CodedInputStream input,
3228                com.google.protobuf.ExtensionRegistryLite extensionRegistry)
3229                throws java.io.IOException {
3230              org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature parsedMessage = null;
3231              try {
3232                parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
3233              } catch (com.google.protobuf.InvalidProtocolBufferException e) {
3234                parsedMessage = (org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature) e.getUnfinishedMessage();
3235                throw e;
3236              } finally {
3237                if (parsedMessage != null) {
3238                  mergeFrom(parsedMessage);
3239                }
3240              }
3241              return this;
3242            }
3243            private int bitField0_;
3244    
3245            // optional string clientName = 1;
3246            private java.lang.Object clientName_ = "";
3247            /**
3248             * <code>optional string clientName = 1;</code>
3249             */
3250            public boolean hasClientName() {
3251              return ((bitField0_ & 0x00000001) == 0x00000001);
3252            }
3253            /**
3254             * <code>optional string clientName = 1;</code>
3255             */
3256            public java.lang.String getClientName() {
3257              java.lang.Object ref = clientName_;
3258              if (!(ref instanceof java.lang.String)) {
3259                java.lang.String s = ((com.google.protobuf.ByteString) ref)
3260                    .toStringUtf8();
3261                clientName_ = s;
3262                return s;
3263              } else {
3264                return (java.lang.String) ref;
3265              }
3266            }
3267            /**
3268             * <code>optional string clientName = 1;</code>
3269             */
3270            public com.google.protobuf.ByteString
3271                getClientNameBytes() {
3272              java.lang.Object ref = clientName_;
3273              if (ref instanceof String) {
3274                com.google.protobuf.ByteString b = 
3275                    com.google.protobuf.ByteString.copyFromUtf8(
3276                        (java.lang.String) ref);
3277                clientName_ = b;
3278                return b;
3279              } else {
3280                return (com.google.protobuf.ByteString) ref;
3281              }
3282            }
3283            /**
3284             * <code>optional string clientName = 1;</code>
3285             */
3286            public Builder setClientName(
3287                java.lang.String value) {
3288              if (value == null) {
3289        throw new NullPointerException();
3290      }
3291      bitField0_ |= 0x00000001;
3292              clientName_ = value;
3293              onChanged();
3294              return this;
3295            }
3296            /**
3297             * <code>optional string clientName = 1;</code>
3298             */
3299            public Builder clearClientName() {
3300              bitField0_ = (bitField0_ & ~0x00000001);
3301              clientName_ = getDefaultInstance().getClientName();
3302              onChanged();
3303              return this;
3304            }
3305            /**
3306             * <code>optional string clientName = 1;</code>
3307             */
3308            public Builder setClientNameBytes(
3309                com.google.protobuf.ByteString value) {
3310              if (value == null) {
3311        throw new NullPointerException();
3312      }
3313      bitField0_ |= 0x00000001;
3314              clientName_ = value;
3315              onChanged();
3316              return this;
3317            }
3318    
3319            // optional string clientMachine = 2;
3320            private java.lang.Object clientMachine_ = "";
3321            /**
3322             * <code>optional string clientMachine = 2;</code>
3323             */
3324            public boolean hasClientMachine() {
3325              return ((bitField0_ & 0x00000002) == 0x00000002);
3326            }
3327            /**
3328             * <code>optional string clientMachine = 2;</code>
3329             */
3330            public java.lang.String getClientMachine() {
3331              java.lang.Object ref = clientMachine_;
3332              if (!(ref instanceof java.lang.String)) {
3333                java.lang.String s = ((com.google.protobuf.ByteString) ref)
3334                    .toStringUtf8();
3335                clientMachine_ = s;
3336                return s;
3337              } else {
3338                return (java.lang.String) ref;
3339              }
3340            }
3341            /**
3342             * <code>optional string clientMachine = 2;</code>
3343             */
3344            public com.google.protobuf.ByteString
3345                getClientMachineBytes() {
3346              java.lang.Object ref = clientMachine_;
3347              if (ref instanceof String) {
3348                com.google.protobuf.ByteString b = 
3349                    com.google.protobuf.ByteString.copyFromUtf8(
3350                        (java.lang.String) ref);
3351                clientMachine_ = b;
3352                return b;
3353              } else {
3354                return (com.google.protobuf.ByteString) ref;
3355              }
3356            }
3357            /**
3358             * <code>optional string clientMachine = 2;</code>
3359             */
3360            public Builder setClientMachine(
3361                java.lang.String value) {
3362              if (value == null) {
3363        throw new NullPointerException();
3364      }
3365      bitField0_ |= 0x00000002;
3366              clientMachine_ = value;
3367              onChanged();
3368              return this;
3369            }
3370            /**
3371             * <code>optional string clientMachine = 2;</code>
3372             */
3373            public Builder clearClientMachine() {
3374              bitField0_ = (bitField0_ & ~0x00000002);
3375              clientMachine_ = getDefaultInstance().getClientMachine();
3376              onChanged();
3377              return this;
3378            }
3379            /**
3380             * <code>optional string clientMachine = 2;</code>
3381             */
3382            public Builder setClientMachineBytes(
3383                com.google.protobuf.ByteString value) {
3384              if (value == null) {
3385        throw new NullPointerException();
3386      }
3387      bitField0_ |= 0x00000002;
3388              clientMachine_ = value;
3389              onChanged();
3390              return this;
3391            }
3392    
3393            // @@protoc_insertion_point(builder_scope:hadoop.hdfs.fsimage.INodeSection.FileUnderConstructionFeature)
3394          }
3395    
      // Eagerly builds the shared default (empty) instance; initFields() sets
      // both string fields to "".
      static {
        defaultInstance = new FileUnderConstructionFeature(true);
        defaultInstance.initFields();
      }
3400    
3401          // @@protoc_insertion_point(class_scope:hadoop.hdfs.fsimage.INodeSection.FileUnderConstructionFeature)
3402        }
3403    
    // Read-only accessor interface shared by AclFeatureProto and its Builder.
    public interface AclFeatureProtoOrBuilder
        extends com.google.protobuf.MessageOrBuilder {

      // repeated fixed32 entries = 2 [packed = true];
      /**
       * <code>repeated fixed32 entries = 2 [packed = true];</code>
       *
       * <pre>
       **
       * An ACL entry is represented by a 32-bit integer in Big Endian
       * format. The bits can be divided in five segments:
       * [0:2) || [2:26) || [26:27) || [27:29) || [29:32)
       *
       * [0:2) -- reserved for future uses.
       * [2:26) -- the name of the entry, which is an ID that points to a
       * string in the StringTableSection.
       * [26:27) -- the scope of the entry (AclEntryScopeProto)
       * [27:29) -- the type of the entry (AclEntryTypeProto)
       * [29:32) -- the permission of the entry (FsActionProto)
       * </pre>
       */
      java.util.List<java.lang.Integer> getEntriesList();
      /**
       * <code>repeated fixed32 entries = 2 [packed = true];</code>
       *
       * <pre>
       **
       * An ACL entry is represented by a 32-bit integer in Big Endian
       * format. The bits can be divided in five segments:
       * [0:2) || [2:26) || [26:27) || [27:29) || [29:32)
       *
       * [0:2) -- reserved for future uses.
       * [2:26) -- the name of the entry, which is an ID that points to a
       * string in the StringTableSection.
       * [26:27) -- the scope of the entry (AclEntryScopeProto)
       * [27:29) -- the type of the entry (AclEntryTypeProto)
       * [29:32) -- the permission of the entry (FsActionProto)
       * </pre>
       */
      int getEntriesCount();
      /**
       * <code>repeated fixed32 entries = 2 [packed = true];</code>
       *
       * <pre>
       **
       * An ACL entry is represented by a 32-bit integer in Big Endian
       * format. The bits can be divided in five segments:
       * [0:2) || [2:26) || [26:27) || [27:29) || [29:32)
       *
       * [0:2) -- reserved for future uses.
       * [2:26) -- the name of the entry, which is an ID that points to a
       * string in the StringTableSection.
       * [26:27) -- the scope of the entry (AclEntryScopeProto)
       * [27:29) -- the type of the entry (AclEntryTypeProto)
       * [29:32) -- the permission of the entry (FsActionProto)
       * </pre>
       */
      int getEntries(int index);
    }
3463        /**
3464         * Protobuf type {@code hadoop.hdfs.fsimage.INodeSection.AclFeatureProto}
3465         */
3466        public static final class AclFeatureProto extends
3467            com.google.protobuf.GeneratedMessage
3468            implements AclFeatureProtoOrBuilder {
      // Use AclFeatureProto.newBuilder() to construct.
      private AclFeatureProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
        super(builder);
        this.unknownFields = builder.getUnknownFields();
      }
      // Uninitialized shell used only for the shared default instance; fields
      // are populated afterwards via initFields() in the static initializer.
      private AclFeatureProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

      private static final AclFeatureProto defaultInstance;
      public static AclFeatureProto getDefaultInstance() {
        return defaultInstance;
      }

      public AclFeatureProto getDefaultInstanceForType() {
        return defaultInstance;
      }

      // Unknown fields seen while parsing are preserved and re-serialized.
      private final com.google.protobuf.UnknownFieldSet unknownFields;
      @java.lang.Override
      public final com.google.protobuf.UnknownFieldSet
          getUnknownFields() {
        return this.unknownFields;
      }
      // Stream-parsing constructor: reads the wire format tag-by-tag until EOF
      // (tag 0) or an unrecognized end-group tag. Accepts `entries` in both
      // encodings: tag 21 = field 2 / wire type 5 (a single unpacked fixed32),
      // tag 18 = field 2 / wire type 2 (a packed, length-delimited run of
      // fixed32s). Note: the `default` label precedes the `case` labels; this
      // is unusual but legal Java — switch dispatch is by value, not position.
      private AclFeatureProto(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        initFields();
        int mutable_bitField0_ = 0;
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
            com.google.protobuf.UnknownFieldSet.newBuilder();
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              default: {
                if (!parseUnknownField(input, unknownFields,
                                       extensionRegistry, tag)) {
                  done = true;
                }
                break;
              }
              case 21: {
                // Lazily switch entries_ to a mutable list on first element.
                if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
                  entries_ = new java.util.ArrayList<java.lang.Integer>();
                  mutable_bitField0_ |= 0x00000001;
                }
                entries_.add(input.readFixed32());
                break;
              }
              case 18: {
                // Packed form: a varint byte length, then length/4 fixed32s.
                int length = input.readRawVarint32();
                int limit = input.pushLimit(length);
                if (!((mutable_bitField0_ & 0x00000001) == 0x00000001) && input.getBytesUntilLimit() > 0) {
                  entries_ = new java.util.ArrayList<java.lang.Integer>();
                  mutable_bitField0_ |= 0x00000001;
                }
                while (input.getBytesUntilLimit() > 0) {
                  entries_.add(input.readFixed32());
                }
                input.popLimit(limit);
                break;
              }
            }
          }
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(this);
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(
              e.getMessage()).setUnfinishedMessage(this);
        } finally {
          // Freeze the list (even on error paths) so the message is immutable.
          if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
            entries_ = java.util.Collections.unmodifiableList(entries_);
          }
          this.unknownFields = unknownFields.build();
          makeExtensionsImmutable();
        }
      }
      // Reflection support: descriptor and field-accessor table generated at
      // class-load time elsewhere in FsImageProto.
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_AclFeatureProto_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_AclFeatureProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto.Builder.class);
      }

      // Shared stateless parser; each parse call constructs a new message via
      // the stream-parsing constructor above.
      public static com.google.protobuf.Parser<AclFeatureProto> PARSER =
          new com.google.protobuf.AbstractParser<AclFeatureProto>() {
        public AclFeatureProto parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          return new AclFeatureProto(input, extensionRegistry);
        }
      };

      @java.lang.Override
      public com.google.protobuf.Parser<AclFeatureProto> getParserForType() {
        return PARSER;
      }
3576    
      // repeated fixed32 entries = 2 [packed = true];
      public static final int ENTRIES_FIELD_NUMBER = 2;
      // Immutable after construction (unmodifiable list or emptyList()).
      private java.util.List<java.lang.Integer> entries_;
      /**
       * <code>repeated fixed32 entries = 2 [packed = true];</code>
       *
       * <pre>
       **
       * An ACL entry is represented by a 32-bit integer in Big Endian
       * format. The bits can be divided in five segments:
       * [0:2) || [2:26) || [26:27) || [27:29) || [29:32)
       *
       * [0:2) -- reserved for future uses.
       * [2:26) -- the name of the entry, which is an ID that points to a
       * string in the StringTableSection.
       * [26:27) -- the scope of the entry (AclEntryScopeProto)
       * [27:29) -- the type of the entry (AclEntryTypeProto)
       * [29:32) -- the permission of the entry (FsActionProto)
       * </pre>
       */
      public java.util.List<java.lang.Integer>
          getEntriesList() {
        return entries_;
      }
      /**
       * <code>repeated fixed32 entries = 2 [packed = true];</code>
       *
       * <pre>
       **
       * An ACL entry is represented by a 32-bit integer in Big Endian
       * format. The bits can be divided in five segments:
       * [0:2) || [2:26) || [26:27) || [27:29) || [29:32)
       *
       * [0:2) -- reserved for future uses.
       * [2:26) -- the name of the entry, which is an ID that points to a
       * string in the StringTableSection.
       * [26:27) -- the scope of the entry (AclEntryScopeProto)
       * [27:29) -- the type of the entry (AclEntryTypeProto)
       * [29:32) -- the permission of the entry (FsActionProto)
       * </pre>
       */
      public int getEntriesCount() {
        return entries_.size();
      }
      /**
       * <code>repeated fixed32 entries = 2 [packed = true];</code>
       *
       * <pre>
       **
       * An ACL entry is represented by a 32-bit integer in Big Endian
       * format. The bits can be divided in five segments:
       * [0:2) || [2:26) || [26:27) || [27:29) || [29:32)
       *
       * [0:2) -- reserved for future uses.
       * [2:26) -- the name of the entry, which is an ID that points to a
       * string in the StringTableSection.
       * [26:27) -- the scope of the entry (AclEntryScopeProto)
       * [27:29) -- the type of the entry (AclEntryTypeProto)
       * [29:32) -- the permission of the entry (FsActionProto)
       * </pre>
       */
      public int getEntries(int index) {
        return entries_.get(index);
      }
      // Byte size of the packed entries payload, computed by
      // getSerializedSize() and consumed by writeTo().
      private int entriesMemoizedSerializedSize = -1;
3642    
3643          private void initFields() {
3644            entries_ = java.util.Collections.emptyList();
3645          }
3646          private byte memoizedIsInitialized = -1;
3647          public final boolean isInitialized() {
3648            byte isInitialized = memoizedIsInitialized;
3649            if (isInitialized != -1) return isInitialized == 1;
3650    
3651            memoizedIsInitialized = 1;
3652            return true;
3653          }
3654    
      // Serializes this message in protobuf wire format. The packed 'entries'
      // field is written as: tag, byte length of the payload, then the raw
      // fixed32 values with no per-element tags.
      public void writeTo(com.google.protobuf.CodedOutputStream output)
                          throws java.io.IOException {
        getSerializedSize();  // side effect: populates entriesMemoizedSerializedSize
        if (getEntriesList().size() > 0) {
          output.writeRawVarint32(18);  // tag: field 2, wire type 2 (length-delimited)
          output.writeRawVarint32(entriesMemoizedSerializedSize);  // payload length in bytes
        }
        for (int i = 0; i < entries_.size(); i++) {
          output.writeFixed32NoTag(entries_.get(i));
        }
        getUnknownFields().writeTo(output);
      }
3667    
      private int memoizedSerializedSize = -1;  // cached total size; -1 until first computed
      public int getSerializedSize() {
        int size = memoizedSerializedSize;
        if (size != -1) return size;

        size = 0;
        {
          int dataSize = 0;
          dataSize = 4 * getEntriesList().size();  // fixed32 = 4 bytes per element
          size += dataSize;
          if (!getEntriesList().isEmpty()) {
            size += 1;  // one byte for the field-2 packed tag
            size += com.google.protobuf.CodedOutputStream
                .computeInt32SizeNoTag(dataSize);  // varint length prefix
          }
          entriesMemoizedSerializedSize = dataSize;  // cached for writeTo()
        }
        size += getUnknownFields().getSerializedSize();
        memoizedSerializedSize = size;
        return size;
      }
3689    
      private static final long serialVersionUID = 0L;
      @java.lang.Override
      protected java.lang.Object writeReplace()
          throws java.io.ObjectStreamException {
        // Standard GeneratedMessage Java-serialization hook; delegates to the superclass.
        return super.writeReplace();
      }
3696    
      // Static parseFrom(...) overloads. All delegate to PARSER; variants exist
      // for ByteString, byte[], InputStream and CodedInputStream sources, each
      // with and without an extension registry. The "delimited" variants read a
      // varint length prefix before the message body.
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto parseFrom(
          com.google.protobuf.ByteString data)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto parseFrom(
          com.google.protobuf.ByteString data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto parseFrom(byte[] data)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto parseFrom(
          byte[] data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto parseFrom(java.io.InputStream input)
          throws java.io.IOException {
        return PARSER.parseFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto parseFrom(
          java.io.InputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseFrom(input, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto parseDelimitedFrom(java.io.InputStream input)
          throws java.io.IOException {
        return PARSER.parseDelimitedFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto parseDelimitedFrom(
          java.io.InputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseDelimitedFrom(input, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto parseFrom(
          com.google.protobuf.CodedInputStream input)
          throws java.io.IOException {
        return PARSER.parseFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto parseFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseFrom(input, extensionRegistry);
      }
3749    
      // Builder factory methods: fresh builder, builder seeded from a
      // prototype message, and builder seeded from this instance.
      public static Builder newBuilder() { return Builder.create(); }
      public Builder newBuilderForType() { return newBuilder(); }
      public static Builder newBuilder(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto prototype) {
        return newBuilder().mergeFrom(prototype);
      }
      public Builder toBuilder() { return newBuilder(this); }
3756    
      // Runtime hook: creates a Builder attached to the given parent, so
      // changes propagate up the enclosing builder tree.
      @java.lang.Override
      protected Builder newBuilderForType(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        Builder builder = new Builder(parent);
        return builder;
      }
3763          /**
3764           * Protobuf type {@code hadoop.hdfs.fsimage.INodeSection.AclFeatureProto}
3765           */
3766          public static final class Builder extends
3767              com.google.protobuf.GeneratedMessage.Builder<Builder>
3768             implements org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProtoOrBuilder {
3769            public static final com.google.protobuf.Descriptors.Descriptor
3770                getDescriptor() {
3771              return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_AclFeatureProto_descriptor;
3772            }
3773    
3774            protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
3775                internalGetFieldAccessorTable() {
3776              return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_AclFeatureProto_fieldAccessorTable
3777                  .ensureFieldAccessorsInitialized(
3778                      org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto.Builder.class);
3779            }
3780    
3781            // Construct using org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto.newBuilder()
3782            private Builder() {
3783              maybeForceBuilderInitialization();
3784            }
3785    
3786            private Builder(
3787                com.google.protobuf.GeneratedMessage.BuilderParent parent) {
3788              super(parent);
3789              maybeForceBuilderInitialization();
3790            }
3791            private void maybeForceBuilderInitialization() {
3792              if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
3793              }
3794            }
3795            private static Builder create() {
3796              return new Builder();
3797            }
3798    
3799            public Builder clear() {
3800              super.clear();
3801              entries_ = java.util.Collections.emptyList();
3802              bitField0_ = (bitField0_ & ~0x00000001);
3803              return this;
3804            }
3805    
3806            public Builder clone() {
3807              return create().mergeFrom(buildPartial());
3808            }
3809    
3810            public com.google.protobuf.Descriptors.Descriptor
3811                getDescriptorForType() {
3812              return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_AclFeatureProto_descriptor;
3813            }
3814    
3815            public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto getDefaultInstanceForType() {
3816              return org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto.getDefaultInstance();
3817            }
3818    
3819            public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto build() {
3820              org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto result = buildPartial();
3821              if (!result.isInitialized()) {
3822                throw newUninitializedMessageException(result);
3823              }
3824              return result;
3825            }
3826    
3827            public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto buildPartial() {
3828              org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto result = new org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto(this);
3829              int from_bitField0_ = bitField0_;
3830              if (((bitField0_ & 0x00000001) == 0x00000001)) {
3831                entries_ = java.util.Collections.unmodifiableList(entries_);
3832                bitField0_ = (bitField0_ & ~0x00000001);
3833              }
3834              result.entries_ = entries_;
3835              onBuilt();
3836              return result;
3837            }
3838    
3839            public Builder mergeFrom(com.google.protobuf.Message other) {
3840              if (other instanceof org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto) {
3841                return mergeFrom((org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto)other);
3842              } else {
3843                super.mergeFrom(other);
3844                return this;
3845              }
3846            }
3847    
3848            public Builder mergeFrom(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto other) {
3849              if (other == org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto.getDefaultInstance()) return this;
3850              if (!other.entries_.isEmpty()) {
3851                if (entries_.isEmpty()) {
3852                  entries_ = other.entries_;
3853                  bitField0_ = (bitField0_ & ~0x00000001);
3854                } else {
3855                  ensureEntriesIsMutable();
3856                  entries_.addAll(other.entries_);
3857                }
3858                onChanged();
3859              }
3860              this.mergeUnknownFields(other.getUnknownFields());
3861              return this;
3862            }
3863    
3864            public final boolean isInitialized() {
3865              return true;
3866            }
3867    
3868            public Builder mergeFrom(
3869                com.google.protobuf.CodedInputStream input,
3870                com.google.protobuf.ExtensionRegistryLite extensionRegistry)
3871                throws java.io.IOException {
3872              org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto parsedMessage = null;
3873              try {
3874                parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
3875              } catch (com.google.protobuf.InvalidProtocolBufferException e) {
3876                parsedMessage = (org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto) e.getUnfinishedMessage();
3877                throw e;
3878              } finally {
3879                if (parsedMessage != null) {
3880                  mergeFrom(parsedMessage);
3881                }
3882              }
3883              return this;
3884            }
3885            private int bitField0_;
3886    
3887            // repeated fixed32 entries = 2 [packed = true];
3888            private java.util.List<java.lang.Integer> entries_ = java.util.Collections.emptyList();
3889            private void ensureEntriesIsMutable() {
3890              if (!((bitField0_ & 0x00000001) == 0x00000001)) {
3891                entries_ = new java.util.ArrayList<java.lang.Integer>(entries_);
3892                bitField0_ |= 0x00000001;
3893               }
3894            }
3895            /**
3896             * <code>repeated fixed32 entries = 2 [packed = true];</code>
3897             *
3898             * <pre>
3899             **
3900             * An ACL entry is represented by a 32-bit integer in Big Endian
3901             * format. The bits can be divided in four segments:
3902             * [0:2) || [2:26) || [26:27) || [27:29) || [29:32)
3903             *
3904             * [0:2) -- reserved for futute uses.
3905             * [2:26) -- the name of the entry, which is an ID that points to a
3906             * string in the StringTableSection.
3907             * [26:27) -- the scope of the entry (AclEntryScopeProto)
3908             * [27:29) -- the type of the entry (AclEntryTypeProto)
3909             * [29:32) -- the permission of the entry (FsActionProto)
3910             * </pre>
3911             */
3912            public java.util.List<java.lang.Integer>
3913                getEntriesList() {
3914              return java.util.Collections.unmodifiableList(entries_);
3915            }
3916            /**
3917             * <code>repeated fixed32 entries = 2 [packed = true];</code>
3918             *
3919             * <pre>
3920             **
3921             * An ACL entry is represented by a 32-bit integer in Big Endian
3922             * format. The bits can be divided in four segments:
3923             * [0:2) || [2:26) || [26:27) || [27:29) || [29:32)
3924             *
3925             * [0:2) -- reserved for futute uses.
3926             * [2:26) -- the name of the entry, which is an ID that points to a
3927             * string in the StringTableSection.
3928             * [26:27) -- the scope of the entry (AclEntryScopeProto)
3929             * [27:29) -- the type of the entry (AclEntryTypeProto)
3930             * [29:32) -- the permission of the entry (FsActionProto)
3931             * </pre>
3932             */
3933            public int getEntriesCount() {
3934              return entries_.size();
3935            }
3936            /**
3937             * <code>repeated fixed32 entries = 2 [packed = true];</code>
3938             *
3939             * <pre>
3940             **
3941             * An ACL entry is represented by a 32-bit integer in Big Endian
3942             * format. The bits can be divided in four segments:
3943             * [0:2) || [2:26) || [26:27) || [27:29) || [29:32)
3944             *
3945             * [0:2) -- reserved for futute uses.
3946             * [2:26) -- the name of the entry, which is an ID that points to a
3947             * string in the StringTableSection.
3948             * [26:27) -- the scope of the entry (AclEntryScopeProto)
3949             * [27:29) -- the type of the entry (AclEntryTypeProto)
3950             * [29:32) -- the permission of the entry (FsActionProto)
3951             * </pre>
3952             */
3953            public int getEntries(int index) {
3954              return entries_.get(index);
3955            }
3956            /**
3957             * <code>repeated fixed32 entries = 2 [packed = true];</code>
3958             *
3959             * <pre>
3960             **
3961             * An ACL entry is represented by a 32-bit integer in Big Endian
3962             * format. The bits can be divided in four segments:
3963             * [0:2) || [2:26) || [26:27) || [27:29) || [29:32)
3964             *
3965             * [0:2) -- reserved for futute uses.
3966             * [2:26) -- the name of the entry, which is an ID that points to a
3967             * string in the StringTableSection.
3968             * [26:27) -- the scope of the entry (AclEntryScopeProto)
3969             * [27:29) -- the type of the entry (AclEntryTypeProto)
3970             * [29:32) -- the permission of the entry (FsActionProto)
3971             * </pre>
3972             */
3973            public Builder setEntries(
3974                int index, int value) {
3975              ensureEntriesIsMutable();
3976              entries_.set(index, value);
3977              onChanged();
3978              return this;
3979            }
3980            /**
3981             * <code>repeated fixed32 entries = 2 [packed = true];</code>
3982             *
3983             * <pre>
3984             **
3985             * An ACL entry is represented by a 32-bit integer in Big Endian
3986             * format. The bits can be divided in four segments:
3987             * [0:2) || [2:26) || [26:27) || [27:29) || [29:32)
3988             *
3989             * [0:2) -- reserved for futute uses.
3990             * [2:26) -- the name of the entry, which is an ID that points to a
3991             * string in the StringTableSection.
3992             * [26:27) -- the scope of the entry (AclEntryScopeProto)
3993             * [27:29) -- the type of the entry (AclEntryTypeProto)
3994             * [29:32) -- the permission of the entry (FsActionProto)
3995             * </pre>
3996             */
3997            public Builder addEntries(int value) {
3998              ensureEntriesIsMutable();
3999              entries_.add(value);
4000              onChanged();
4001              return this;
4002            }
4003            /**
4004             * <code>repeated fixed32 entries = 2 [packed = true];</code>
4005             *
4006             * <pre>
4007             **
4008             * An ACL entry is represented by a 32-bit integer in Big Endian
4009             * format. The bits can be divided in four segments:
4010             * [0:2) || [2:26) || [26:27) || [27:29) || [29:32)
4011             *
4012             * [0:2) -- reserved for futute uses.
4013             * [2:26) -- the name of the entry, which is an ID that points to a
4014             * string in the StringTableSection.
4015             * [26:27) -- the scope of the entry (AclEntryScopeProto)
4016             * [27:29) -- the type of the entry (AclEntryTypeProto)
4017             * [29:32) -- the permission of the entry (FsActionProto)
4018             * </pre>
4019             */
4020            public Builder addAllEntries(
4021                java.lang.Iterable<? extends java.lang.Integer> values) {
4022              ensureEntriesIsMutable();
4023              super.addAll(values, entries_);
4024              onChanged();
4025              return this;
4026            }
4027            /**
4028             * <code>repeated fixed32 entries = 2 [packed = true];</code>
4029             *
4030             * <pre>
4031             **
4032             * An ACL entry is represented by a 32-bit integer in Big Endian
4033             * format. The bits can be divided in four segments:
4034             * [0:2) || [2:26) || [26:27) || [27:29) || [29:32)
4035             *
4036             * [0:2) -- reserved for futute uses.
4037             * [2:26) -- the name of the entry, which is an ID that points to a
4038             * string in the StringTableSection.
4039             * [26:27) -- the scope of the entry (AclEntryScopeProto)
4040             * [27:29) -- the type of the entry (AclEntryTypeProto)
4041             * [29:32) -- the permission of the entry (FsActionProto)
4042             * </pre>
4043             */
4044            public Builder clearEntries() {
4045              entries_ = java.util.Collections.emptyList();
4046              bitField0_ = (bitField0_ & ~0x00000001);
4047              onChanged();
4048              return this;
4049            }
4050    
4051            // @@protoc_insertion_point(builder_scope:hadoop.hdfs.fsimage.INodeSection.AclFeatureProto)
4052          }
4053    
      // Eagerly create the singleton default instance at class-load time.
      static {
        defaultInstance = new AclFeatureProto(true);
        defaultInstance.initFields();
      }
4058    
4059          // @@protoc_insertion_point(class_scope:hadoop.hdfs.fsimage.INodeSection.AclFeatureProto)
4060        }
4061    
    // Read-only accessor contract implemented by both XAttrCompactProto and
    // its Builder.
    public interface XAttrCompactProtoOrBuilder
        extends com.google.protobuf.MessageOrBuilder {

      // required fixed32 name = 1;
      /**
       * <code>required fixed32 name = 1;</code>
       *
       * <pre>
       **
       * 
       * [0:2) -- the namespace of XAttr (XAttrNamespaceProto)
       * [2:26) -- the name of the entry, which is an ID that points to a
       * string in the StringTableSection.
       * [26:27) -- namespace extension. Originally there were only 4 namespaces
       * so only 2 bits were needed. At that time, this bit was reserved. When a
       * 5th namespace was created (raw) this bit became used as a 3rd namespace
       * bit.
       * [27:32) -- reserved for future uses.
       * </pre>
       */
      boolean hasName();
      /**
       * <code>required fixed32 name = 1;</code>
       *
       * <pre>
       **
       * 
       * [0:2) -- the namespace of XAttr (XAttrNamespaceProto)
       * [2:26) -- the name of the entry, which is an ID that points to a
       * string in the StringTableSection.
       * [26:27) -- namespace extension. Originally there were only 4 namespaces
       * so only 2 bits were needed. At that time, this bit was reserved. When a
       * 5th namespace was created (raw) this bit became used as a 3rd namespace
       * bit.
       * [27:32) -- reserved for future uses.
       * </pre>
       */
      int getName();

      // optional bytes value = 2;
      /**
       * <code>optional bytes value = 2;</code>
       */
      boolean hasValue();
      /**
       * <code>optional bytes value = 2;</code>
       */
      com.google.protobuf.ByteString getValue();
    }
4111        /**
4112         * Protobuf type {@code hadoop.hdfs.fsimage.INodeSection.XAttrCompactProto}
4113         */
4114        public static final class XAttrCompactProto extends
4115            com.google.protobuf.GeneratedMessage
4116            implements XAttrCompactProtoOrBuilder {
      // Use XAttrCompactProto.newBuilder() to construct.
      private XAttrCompactProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
        super(builder);
        this.unknownFields = builder.getUnknownFields();
      }
      // Minimal constructor used only for the singleton default instance.
      private XAttrCompactProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
4123    
      // Singleton with all fields at their defaults; presumably assigned in the
      // class's static initializer (not visible in this chunk), mirroring AclFeatureProto.
      private static final XAttrCompactProto defaultInstance;
      public static XAttrCompactProto getDefaultInstance() {
        return defaultInstance;
      }

      public XAttrCompactProto getDefaultInstanceForType() {
        return defaultInstance;
      }
4132    
      // Fields read off the wire that this generated class does not recognize;
      // preserved so they round-trip through re-serialization.
      private final com.google.protobuf.UnknownFieldSet unknownFields;
      @java.lang.Override
      public final com.google.protobuf.UnknownFieldSet
          getUnknownFields() {
        return this.unknownFields;
      }
      // Parsing constructor: reads tag/value pairs from the stream until EOF
      // (tag 0). Note the 'default' arm precedes the field cases — switch arms
      // are matched by value, so ordering does not affect behavior.
      private XAttrCompactProto(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        initFields();
        int mutable_bitField0_ = 0;
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
            com.google.protobuf.UnknownFieldSet.newBuilder();
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                // End of stream / end of message.
                done = true;
                break;
              default: {
                // Unrecognized field: preserve it in unknownFields, or stop if
                // it cannot be skipped.
                if (!parseUnknownField(input, unknownFields,
                                       extensionRegistry, tag)) {
                  done = true;
                }
                break;
              }
              case 13: {
                // Field 1 (name), wire type 5 (fixed32).
                bitField0_ |= 0x00000001;
                name_ = input.readFixed32();
                break;
              }
              case 18: {
                // Field 2 (value), wire type 2 (length-delimited bytes).
                bitField0_ |= 0x00000002;
                value_ = input.readBytes();
                break;
              }
            }
          }
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(this);
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(
              e.getMessage()).setUnfinishedMessage(this);
        } finally {
          this.unknownFields = unknownFields.build();
          makeExtensionsImmutable();
        }
      }
      // Reflection support: descriptor and field-accessor table for this message type.
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_XAttrCompactProto_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_XAttrCompactProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto.Builder.class);
      }
4195    
      // Shared parser instance backing all parseFrom(...) entry points; it
      // delegates to the parsing constructor above.
      public static com.google.protobuf.Parser<XAttrCompactProto> PARSER =
          new com.google.protobuf.AbstractParser<XAttrCompactProto>() {
        public XAttrCompactProto parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          return new XAttrCompactProto(input, extensionRegistry);
        }
      };

      @java.lang.Override
      public com.google.protobuf.Parser<XAttrCompactProto> getParserForType() {
        return PARSER;
      }
4210    
      private int bitField0_;  // presence bits: bit 0 = name, bit 1 = value
      // required fixed32 name = 1;
      public static final int NAME_FIELD_NUMBER = 1;
      private int name_;
      /**
       * <code>required fixed32 name = 1;</code>
       *
       * <pre>
       **
       * 
       * [0:2) -- the namespace of XAttr (XAttrNamespaceProto)
       * [2:26) -- the name of the entry, which is an ID that points to a
       * string in the StringTableSection.
       * [26:27) -- namespace extension. Originally there were only 4 namespaces
       * so only 2 bits were needed. At that time, this bit was reserved. When a
       * 5th namespace was created (raw) this bit became used as a 3rd namespace
       * bit.
       * [27:32) -- reserved for future uses.
       * </pre>
       */
      public boolean hasName() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>required fixed32 name = 1;</code>
       *
       * <pre>
       **
       * 
       * [0:2) -- the namespace of XAttr (XAttrNamespaceProto)
       * [2:26) -- the name of the entry, which is an ID that points to a
       * string in the StringTableSection.
       * [26:27) -- namespace extension. Originally there were only 4 namespaces
       * so only 2 bits were needed. At that time, this bit was reserved. When a
       * 5th namespace was created (raw) this bit became used as a 3rd namespace
       * bit.
       * [27:32) -- reserved for future uses.
       * </pre>
       */
      public int getName() {
        return name_;
      }
4253    
      // optional bytes value = 2;
      public static final int VALUE_FIELD_NUMBER = 2;
      private com.google.protobuf.ByteString value_;
      /**
       * <code>optional bytes value = 2;</code>
       */
      public boolean hasValue() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>optional bytes value = 2;</code>
       */
      public com.google.protobuf.ByteString getValue() {
        return value_;  // never null; defaults to ByteString.EMPTY (see initFields)
      }
4269    
      // Resets all fields to their proto defaults; called from constructors.
      private void initFields() {
        name_ = 0;
        value_ = com.google.protobuf.ByteString.EMPTY;
      }
      private byte memoizedIsInitialized = -1;  // -1 = not yet computed, 0 = false, 1 = true
      public final boolean isInitialized() {
        byte isInitialized = memoizedIsInitialized;
        if (isInitialized != -1) return isInitialized == 1;

        // 'name' is the only required field.
        if (!hasName()) {
          memoizedIsInitialized = 0;
          return false;
        }
        memoizedIsInitialized = 1;
        return true;
      }
4286    
      // Serializes this message; only fields whose presence bit is set are written.
      public void writeTo(com.google.protobuf.CodedOutputStream output)
                          throws java.io.IOException {
        getSerializedSize();  // matches generated-code contract of sizing before writing
        if (((bitField0_ & 0x00000001) == 0x00000001)) {
          output.writeFixed32(1, name_);
        }
        if (((bitField0_ & 0x00000002) == 0x00000002)) {
          output.writeBytes(2, value_);
        }
        getUnknownFields().writeTo(output);
      }
4298    
      private int memoizedSerializedSize = -1;  // cached total size; -1 until first computed
      public int getSerializedSize() {
        int size = memoizedSerializedSize;
        if (size != -1) return size;

        size = 0;
        if (((bitField0_ & 0x00000001) == 0x00000001)) {
          size += com.google.protobuf.CodedOutputStream
            .computeFixed32Size(1, name_);
        }
        if (((bitField0_ & 0x00000002) == 0x00000002)) {
          size += com.google.protobuf.CodedOutputStream
            .computeBytesSize(2, value_);
        }
        size += getUnknownFields().getSerializedSize();
        memoizedSerializedSize = size;
        return size;
      }
4317    
      private static final long serialVersionUID = 0L;
      @java.lang.Override
      protected java.lang.Object writeReplace()
          throws java.io.ObjectStreamException {
        // Standard GeneratedMessage Java-serialization hook; delegates to the superclass.
        return super.writeReplace();
      }
4324    
      // Static parse entry points; all delegate to PARSER. The *Delimited*
      // variants read a varint length prefix before the message bytes.
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto parseFrom(
          com.google.protobuf.ByteString data)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto parseFrom(
          com.google.protobuf.ByteString data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto parseFrom(byte[] data)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto parseFrom(
          byte[] data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto parseFrom(java.io.InputStream input)
          throws java.io.IOException {
        return PARSER.parseFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto parseFrom(
          java.io.InputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseFrom(input, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto parseDelimitedFrom(java.io.InputStream input)
          throws java.io.IOException {
        return PARSER.parseDelimitedFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto parseDelimitedFrom(
          java.io.InputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseDelimitedFrom(input, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto parseFrom(
          com.google.protobuf.CodedInputStream input)
          throws java.io.IOException {
        return PARSER.parseFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto parseFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseFrom(input, extensionRegistry);
      }
4377    
      // Builder factory methods.
      public static Builder newBuilder() { return Builder.create(); }
      public Builder newBuilderForType() { return newBuilder(); }
      public static Builder newBuilder(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto prototype) {
        return newBuilder().mergeFrom(prototype);
      }
      public Builder toBuilder() { return newBuilder(this); }

      /** Creates a builder attached to the given parent (used for nested field builders). */
      @java.lang.Override
      protected Builder newBuilderForType(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        Builder builder = new Builder(parent);
        return builder;
      }
4391          /**
4392           * Protobuf type {@code hadoop.hdfs.fsimage.INodeSection.XAttrCompactProto}
4393           */
4394          public static final class Builder extends
4395              com.google.protobuf.GeneratedMessage.Builder<Builder>
4396             implements org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProtoOrBuilder {
4397            public static final com.google.protobuf.Descriptors.Descriptor
4398                getDescriptor() {
4399              return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_XAttrCompactProto_descriptor;
4400            }
4401    
4402            protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
4403                internalGetFieldAccessorTable() {
4404              return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_XAttrCompactProto_fieldAccessorTable
4405                  .ensureFieldAccessorsInitialized(
4406                      org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto.Builder.class);
4407            }
4408    
4409            // Construct using org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto.newBuilder()
4410            private Builder() {
4411              maybeForceBuilderInitialization();
4412            }
4413    
4414            private Builder(
4415                com.google.protobuf.GeneratedMessage.BuilderParent parent) {
4416              super(parent);
4417              maybeForceBuilderInitialization();
4418            }
4419            private void maybeForceBuilderInitialization() {
4420              if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
4421              }
4422            }
4423            private static Builder create() {
4424              return new Builder();
4425            }
4426    
4427            public Builder clear() {
4428              super.clear();
4429              name_ = 0;
4430              bitField0_ = (bitField0_ & ~0x00000001);
4431              value_ = com.google.protobuf.ByteString.EMPTY;
4432              bitField0_ = (bitField0_ & ~0x00000002);
4433              return this;
4434            }
4435    
4436            public Builder clone() {
4437              return create().mergeFrom(buildPartial());
4438            }
4439    
4440            public com.google.protobuf.Descriptors.Descriptor
4441                getDescriptorForType() {
4442              return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_XAttrCompactProto_descriptor;
4443            }
4444    
4445            public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto getDefaultInstanceForType() {
4446              return org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto.getDefaultInstance();
4447            }
4448    
4449            public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto build() {
4450              org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto result = buildPartial();
4451              if (!result.isInitialized()) {
4452                throw newUninitializedMessageException(result);
4453              }
4454              return result;
4455            }
4456    
4457            public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto buildPartial() {
4458              org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto result = new org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto(this);
4459              int from_bitField0_ = bitField0_;
4460              int to_bitField0_ = 0;
4461              if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
4462                to_bitField0_ |= 0x00000001;
4463              }
4464              result.name_ = name_;
4465              if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
4466                to_bitField0_ |= 0x00000002;
4467              }
4468              result.value_ = value_;
4469              result.bitField0_ = to_bitField0_;
4470              onBuilt();
4471              return result;
4472            }
4473    
4474            public Builder mergeFrom(com.google.protobuf.Message other) {
4475              if (other instanceof org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto) {
4476                return mergeFrom((org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto)other);
4477              } else {
4478                super.mergeFrom(other);
4479                return this;
4480              }
4481            }
4482    
4483            public Builder mergeFrom(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto other) {
4484              if (other == org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto.getDefaultInstance()) return this;
4485              if (other.hasName()) {
4486                setName(other.getName());
4487              }
4488              if (other.hasValue()) {
4489                setValue(other.getValue());
4490              }
4491              this.mergeUnknownFields(other.getUnknownFields());
4492              return this;
4493            }
4494    
4495            public final boolean isInitialized() {
4496              if (!hasName()) {
4497                
4498                return false;
4499              }
4500              return true;
4501            }
4502    
4503            public Builder mergeFrom(
4504                com.google.protobuf.CodedInputStream input,
4505                com.google.protobuf.ExtensionRegistryLite extensionRegistry)
4506                throws java.io.IOException {
4507              org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto parsedMessage = null;
4508              try {
4509                parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
4510              } catch (com.google.protobuf.InvalidProtocolBufferException e) {
4511                parsedMessage = (org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto) e.getUnfinishedMessage();
4512                throw e;
4513              } finally {
4514                if (parsedMessage != null) {
4515                  mergeFrom(parsedMessage);
4516                }
4517              }
4518              return this;
4519            }
4520            private int bitField0_;
4521    
4522            // required fixed32 name = 1;
4523            private int name_ ;
4524            /**
4525             * <code>required fixed32 name = 1;</code>
4526             *
4527             * <pre>
4528             **
4529             * 
4530             * [0:2) -- the namespace of XAttr (XAttrNamespaceProto)
4531             * [2:26) -- the name of the entry, which is an ID that points to a
4532             * string in the StringTableSection.
4533             * [26:27) -- namespace extension. Originally there were only 4 namespaces
4534             * so only 2 bits were needed. At that time, this bit was reserved. When a
4535             * 5th namespace was created (raw) this bit became used as a 3rd namespace
4536             * bit.
4537             * [27:32) -- reserved for future uses.
4538             * </pre>
4539             */
4540            public boolean hasName() {
4541              return ((bitField0_ & 0x00000001) == 0x00000001);
4542            }
4543            /**
4544             * <code>required fixed32 name = 1;</code>
4545             *
4546             * <pre>
4547             **
4548             * 
4549             * [0:2) -- the namespace of XAttr (XAttrNamespaceProto)
4550             * [2:26) -- the name of the entry, which is an ID that points to a
4551             * string in the StringTableSection.
4552             * [26:27) -- namespace extension. Originally there were only 4 namespaces
4553             * so only 2 bits were needed. At that time, this bit was reserved. When a
4554             * 5th namespace was created (raw) this bit became used as a 3rd namespace
4555             * bit.
4556             * [27:32) -- reserved for future uses.
4557             * </pre>
4558             */
4559            public int getName() {
4560              return name_;
4561            }
4562            /**
4563             * <code>required fixed32 name = 1;</code>
4564             *
4565             * <pre>
4566             **
4567             * 
4568             * [0:2) -- the namespace of XAttr (XAttrNamespaceProto)
4569             * [2:26) -- the name of the entry, which is an ID that points to a
4570             * string in the StringTableSection.
4571             * [26:27) -- namespace extension. Originally there were only 4 namespaces
4572             * so only 2 bits were needed. At that time, this bit was reserved. When a
4573             * 5th namespace was created (raw) this bit became used as a 3rd namespace
4574             * bit.
4575             * [27:32) -- reserved for future uses.
4576             * </pre>
4577             */
4578            public Builder setName(int value) {
4579              bitField0_ |= 0x00000001;
4580              name_ = value;
4581              onChanged();
4582              return this;
4583            }
4584            /**
4585             * <code>required fixed32 name = 1;</code>
4586             *
4587             * <pre>
4588             **
4589             * 
4590             * [0:2) -- the namespace of XAttr (XAttrNamespaceProto)
4591             * [2:26) -- the name of the entry, which is an ID that points to a
4592             * string in the StringTableSection.
4593             * [26:27) -- namespace extension. Originally there were only 4 namespaces
4594             * so only 2 bits were needed. At that time, this bit was reserved. When a
4595             * 5th namespace was created (raw) this bit became used as a 3rd namespace
4596             * bit.
4597             * [27:32) -- reserved for future uses.
4598             * </pre>
4599             */
4600            public Builder clearName() {
4601              bitField0_ = (bitField0_ & ~0x00000001);
4602              name_ = 0;
4603              onChanged();
4604              return this;
4605            }
4606    
4607            // optional bytes value = 2;
4608            private com.google.protobuf.ByteString value_ = com.google.protobuf.ByteString.EMPTY;
4609            /**
4610             * <code>optional bytes value = 2;</code>
4611             */
4612            public boolean hasValue() {
4613              return ((bitField0_ & 0x00000002) == 0x00000002);
4614            }
4615            /**
4616             * <code>optional bytes value = 2;</code>
4617             */
4618            public com.google.protobuf.ByteString getValue() {
4619              return value_;
4620            }
4621            /**
4622             * <code>optional bytes value = 2;</code>
4623             */
4624            public Builder setValue(com.google.protobuf.ByteString value) {
4625              if (value == null) {
4626        throw new NullPointerException();
4627      }
4628      bitField0_ |= 0x00000002;
4629              value_ = value;
4630              onChanged();
4631              return this;
4632            }
4633            /**
4634             * <code>optional bytes value = 2;</code>
4635             */
4636            public Builder clearValue() {
4637              bitField0_ = (bitField0_ & ~0x00000002);
4638              value_ = getDefaultInstance().getValue();
4639              onChanged();
4640              return this;
4641            }
4642    
4643            // @@protoc_insertion_point(builder_scope:hadoop.hdfs.fsimage.INodeSection.XAttrCompactProto)
4644          }
4645    
      // Eagerly create the shared default (empty) instance when the class loads.
      static {
        defaultInstance = new XAttrCompactProto(true);
        defaultInstance.initFields();
      }
4650    
4651          // @@protoc_insertion_point(class_scope:hadoop.hdfs.fsimage.INodeSection.XAttrCompactProto)
4652        }
4653    
    /**
     * Read-only accessor interface shared by {@code XAttrFeatureProto} and its
     * Builder.
     */
    public interface XAttrFeatureProtoOrBuilder
        extends com.google.protobuf.MessageOrBuilder {

      // repeated .hadoop.hdfs.fsimage.INodeSection.XAttrCompactProto xAttrs = 1;
      /**
       * <code>repeated .hadoop.hdfs.fsimage.INodeSection.XAttrCompactProto xAttrs = 1;</code>
       */
      java.util.List<org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto> 
          getXAttrsList();
      /**
       * <code>repeated .hadoop.hdfs.fsimage.INodeSection.XAttrCompactProto xAttrs = 1;</code>
       */
      org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto getXAttrs(int index);
      /**
       * <code>repeated .hadoop.hdfs.fsimage.INodeSection.XAttrCompactProto xAttrs = 1;</code>
       */
      int getXAttrsCount();
      /**
       * <code>repeated .hadoop.hdfs.fsimage.INodeSection.XAttrCompactProto xAttrs = 1;</code>
       */
      java.util.List<? extends org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProtoOrBuilder> 
          getXAttrsOrBuilderList();
      /**
       * <code>repeated .hadoop.hdfs.fsimage.INodeSection.XAttrCompactProto xAttrs = 1;</code>
       */
      org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProtoOrBuilder getXAttrsOrBuilder(
          int index);
    }
4682        /**
4683         * Protobuf type {@code hadoop.hdfs.fsimage.INodeSection.XAttrFeatureProto}
4684         */
4685        public static final class XAttrFeatureProto extends
4686            com.google.protobuf.GeneratedMessage
4687            implements XAttrFeatureProtoOrBuilder {
      // Use XAttrFeatureProto.newBuilder() to construct.
      private XAttrFeatureProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
        super(builder);
        this.unknownFields = builder.getUnknownFields();
      }
      // Uninitialized-instance constructor used only for the shared default instance.
      private XAttrFeatureProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
4694    
      // Shared immutable default instance, created in the static initializer.
      private static final XAttrFeatureProto defaultInstance;
      public static XAttrFeatureProto getDefaultInstance() {
        return defaultInstance;
      }

      public XAttrFeatureProto getDefaultInstanceForType() {
        return defaultInstance;
      }

      // Fields not recognized during parsing, preserved for round-tripping.
      private final com.google.protobuf.UnknownFieldSet unknownFields;
      @java.lang.Override
      public final com.google.protobuf.UnknownFieldSet
          getUnknownFields() {
        return this.unknownFields;
      }
      /**
       * Parses a serialized XAttrFeatureProto from the stream, accumulating the
       * repeated 'xAttrs' messages (field 1) and preserving unrecognized fields.
       */
      private XAttrFeatureProto(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        initFields();
        int mutable_bitField0_ = 0;
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
            com.google.protobuf.UnknownFieldSet.newBuilder();
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                // Tag 0 means end of the stream / enclosing group.
                done = true;
                break;
              default: {
                if (!parseUnknownField(input, unknownFields,
                                       extensionRegistry, tag)) {
                  done = true;
                }
                break;
              }
              case 10: {
                // Tag 10 = field 1, wire type 2 (length-delimited): one xAttrs entry.
                if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
                  xAttrs_ = new java.util.ArrayList<org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto>();
                  mutable_bitField0_ |= 0x00000001;
                }
                xAttrs_.add(input.readMessage(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto.PARSER, extensionRegistry));
                break;
              }
            }
          }
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(this);
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(
              e.getMessage()).setUnfinishedMessage(this);
        } finally {
          // Freeze the list even if parsing failed partway through.
          if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
            xAttrs_ = java.util.Collections.unmodifiableList(xAttrs_);
          }
          this.unknownFields = unknownFields.build();
          makeExtensionsImmutable();
        }
      }
      // Descriptor and reflection plumbing for this message type.
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_XAttrFeatureProto_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_XAttrFeatureProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto.Builder.class);
      }
4767    
      // Parser singleton; delegates to the stream-parsing constructor above.
      public static com.google.protobuf.Parser<XAttrFeatureProto> PARSER =
          new com.google.protobuf.AbstractParser<XAttrFeatureProto>() {
        public XAttrFeatureProto parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          return new XAttrFeatureProto(input, extensionRegistry);
        }
      };

      @java.lang.Override
      public com.google.protobuf.Parser<XAttrFeatureProto> getParserForType() {
        return PARSER;
      }
4782    
      // repeated .hadoop.hdfs.fsimage.INodeSection.XAttrCompactProto xAttrs = 1;
      public static final int XATTRS_FIELD_NUMBER = 1;
      // Unmodifiable after construction (see the parsing constructor / initFields()).
      private java.util.List<org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto> xAttrs_;
      /**
       * <code>repeated .hadoop.hdfs.fsimage.INodeSection.XAttrCompactProto xAttrs = 1;</code>
       */
      public java.util.List<org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto> getXAttrsList() {
        return xAttrs_;
      }
      /**
       * <code>repeated .hadoop.hdfs.fsimage.INodeSection.XAttrCompactProto xAttrs = 1;</code>
       */
      public java.util.List<? extends org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProtoOrBuilder> 
          getXAttrsOrBuilderList() {
        return xAttrs_;
      }
      /**
       * <code>repeated .hadoop.hdfs.fsimage.INodeSection.XAttrCompactProto xAttrs = 1;</code>
       */
      public int getXAttrsCount() {
        return xAttrs_.size();
      }
      /**
       * <code>repeated .hadoop.hdfs.fsimage.INodeSection.XAttrCompactProto xAttrs = 1;</code>
       */
      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto getXAttrs(int index) {
        return xAttrs_.get(index);
      }
      /**
       * <code>repeated .hadoop.hdfs.fsimage.INodeSection.XAttrCompactProto xAttrs = 1;</code>
       */
      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProtoOrBuilder getXAttrsOrBuilder(
          int index) {
        return xAttrs_.get(index);
      }
4818    
      // Sets all fields to their defaults (empty list for the repeated xAttrs field).
      private void initFields() {
        xAttrs_ = java.util.Collections.emptyList();
      }
      // Memoized result of isInitialized(): -1 = not yet computed, 0 = false, 1 = true.
      private byte memoizedIsInitialized = -1;
      /** True iff every nested XAttrCompactProto has its required fields set. */
      public final boolean isInitialized() {
        byte isInitialized = memoizedIsInitialized;
        if (isInitialized != -1) return isInitialized == 1;

        for (int i = 0; i < getXAttrsCount(); i++) {
          if (!getXAttrs(i).isInitialized()) {
            memoizedIsInitialized = 0;
            return false;
          }
        }
        memoizedIsInitialized = 1;
        return true;
      }
4836    
      /**
       * Serializes each xAttrs entry as field 1, then any preserved unknown fields.
       */
      public void writeTo(com.google.protobuf.CodedOutputStream output)
                          throws java.io.IOException {
        getSerializedSize();  // ensure the memoized size is computed before writing
        for (int i = 0; i < xAttrs_.size(); i++) {
          output.writeMessage(1, xAttrs_.get(i));
        }
        getUnknownFields().writeTo(output);
      }

      // Memoized wire size in bytes; -1 means not yet computed.
      private int memoizedSerializedSize = -1;
      /**
       * Computes (and caches) the serialized size of all xAttrs entries plus
       * any unknown fields.
       */
      public int getSerializedSize() {
        int size = memoizedSerializedSize;
        if (size != -1) return size;

        size = 0;
        for (int i = 0; i < xAttrs_.size(); i++) {
          size += com.google.protobuf.CodedOutputStream
            .computeMessageSize(1, xAttrs_.get(i));
        }
        size += getUnknownFields().getSerializedSize();
        memoizedSerializedSize = size;
        return size;
      }

      private static final long serialVersionUID = 0L;
      /** Java serialization hook; delegates to GeneratedMessage's writeReplace(). */
      @java.lang.Override
      protected java.lang.Object writeReplace()
          throws java.io.ObjectStreamException {
        return super.writeReplace();
      }
4867    
      // Static parse entry points; all delegate to PARSER. The *Delimited*
      // variants read a varint length prefix before the message bytes.
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto parseFrom(
          com.google.protobuf.ByteString data)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto parseFrom(
          com.google.protobuf.ByteString data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto parseFrom(byte[] data)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto parseFrom(
          byte[] data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto parseFrom(java.io.InputStream input)
          throws java.io.IOException {
        return PARSER.parseFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto parseFrom(
          java.io.InputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseFrom(input, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto parseDelimitedFrom(java.io.InputStream input)
          throws java.io.IOException {
        return PARSER.parseDelimitedFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto parseDelimitedFrom(
          java.io.InputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseDelimitedFrom(input, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto parseFrom(
          com.google.protobuf.CodedInputStream input)
          throws java.io.IOException {
        return PARSER.parseFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto parseFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseFrom(input, extensionRegistry);
      }
4920    
      // Builder factory methods.
      public static Builder newBuilder() { return Builder.create(); }
      public Builder newBuilderForType() { return newBuilder(); }
      public static Builder newBuilder(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto prototype) {
        return newBuilder().mergeFrom(prototype);
      }
      public Builder toBuilder() { return newBuilder(this); }

      /** Creates a builder attached to the given parent (used for nested field builders). */
      @java.lang.Override
      protected Builder newBuilderForType(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        Builder builder = new Builder(parent);
        return builder;
      }
4934          /**
4935           * Protobuf type {@code hadoop.hdfs.fsimage.INodeSection.XAttrFeatureProto}
4936           */
4937          public static final class Builder extends
4938              com.google.protobuf.GeneratedMessage.Builder<Builder>
4939             implements org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProtoOrBuilder {
        // Descriptor and reflection plumbing for this builder.
        public static final com.google.protobuf.Descriptors.Descriptor
            getDescriptor() {
          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_XAttrFeatureProto_descriptor;
        }

        protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
            internalGetFieldAccessorTable() {
          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_XAttrFeatureProto_fieldAccessorTable
              .ensureFieldAccessorsInitialized(
                  org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto.Builder.class);
        }
4951    
4952            // Construct using org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto.newBuilder()
4953            private Builder() {
4954              maybeForceBuilderInitialization();
4955            }
4956    
4957            private Builder(
4958                com.google.protobuf.GeneratedMessage.BuilderParent parent) {
4959              super(parent);
4960              maybeForceBuilderInitialization();
4961            }
4962            private void maybeForceBuilderInitialization() {
4963              if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
4964                getXAttrsFieldBuilder();
4965              }
4966            }
4967            private static Builder create() {
4968              return new Builder();
4969            }
4970    
4971            public Builder clear() {
4972              super.clear();
4973              if (xAttrsBuilder_ == null) {
4974                xAttrs_ = java.util.Collections.emptyList();
4975                bitField0_ = (bitField0_ & ~0x00000001);
4976              } else {
4977                xAttrsBuilder_.clear();
4978              }
4979              return this;
4980            }
4981    
4982            public Builder clone() {
4983              return create().mergeFrom(buildPartial());
4984            }
4985    
4986            public com.google.protobuf.Descriptors.Descriptor
4987                getDescriptorForType() {
4988              return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_XAttrFeatureProto_descriptor;
4989            }
4990    
4991            public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto getDefaultInstanceForType() {
4992              return org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto.getDefaultInstance();
4993            }
4994    
4995            public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto build() {
4996              org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto result = buildPartial();
4997              if (!result.isInitialized()) {
4998                throw newUninitializedMessageException(result);
4999              }
5000              return result;
5001            }
5002    
5003            public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto buildPartial() {
5004              org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto result = new org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto(this);
5005              int from_bitField0_ = bitField0_;
5006              if (xAttrsBuilder_ == null) {
5007                if (((bitField0_ & 0x00000001) == 0x00000001)) {
5008                  xAttrs_ = java.util.Collections.unmodifiableList(xAttrs_);
5009                  bitField0_ = (bitField0_ & ~0x00000001);
5010                }
5011                result.xAttrs_ = xAttrs_;
5012              } else {
5013                result.xAttrs_ = xAttrsBuilder_.build();
5014              }
5015              onBuilt();
5016              return result;
5017            }
5018    
5019            public Builder mergeFrom(com.google.protobuf.Message other) {
5020              if (other instanceof org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto) {
5021                return mergeFrom((org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto)other);
5022              } else {
5023                super.mergeFrom(other);
5024                return this;
5025              }
5026            }
5027    
5028            public Builder mergeFrom(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto other) {
5029              if (other == org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto.getDefaultInstance()) return this;
5030              if (xAttrsBuilder_ == null) {
5031                if (!other.xAttrs_.isEmpty()) {
5032                  if (xAttrs_.isEmpty()) {
5033                    xAttrs_ = other.xAttrs_;
5034                    bitField0_ = (bitField0_ & ~0x00000001);
5035                  } else {
5036                    ensureXAttrsIsMutable();
5037                    xAttrs_.addAll(other.xAttrs_);
5038                  }
5039                  onChanged();
5040                }
5041              } else {
5042                if (!other.xAttrs_.isEmpty()) {
5043                  if (xAttrsBuilder_.isEmpty()) {
5044                    xAttrsBuilder_.dispose();
5045                    xAttrsBuilder_ = null;
5046                    xAttrs_ = other.xAttrs_;
5047                    bitField0_ = (bitField0_ & ~0x00000001);
5048                    xAttrsBuilder_ = 
5049                      com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
5050                         getXAttrsFieldBuilder() : null;
5051                  } else {
5052                    xAttrsBuilder_.addAllMessages(other.xAttrs_);
5053                  }
5054                }
5055              }
5056              this.mergeUnknownFields(other.getUnknownFields());
5057              return this;
5058            }
5059    
5060            public final boolean isInitialized() {
5061              for (int i = 0; i < getXAttrsCount(); i++) {
5062                if (!getXAttrs(i).isInitialized()) {
5063                  
5064                  return false;
5065                }
5066              }
5067              return true;
5068            }
5069    
5070            public Builder mergeFrom(
5071                com.google.protobuf.CodedInputStream input,
5072                com.google.protobuf.ExtensionRegistryLite extensionRegistry)
5073                throws java.io.IOException {
5074              org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto parsedMessage = null;
5075              try {
5076                parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
5077              } catch (com.google.protobuf.InvalidProtocolBufferException e) {
5078                parsedMessage = (org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto) e.getUnfinishedMessage();
5079                throw e;
5080              } finally {
5081                if (parsedMessage != null) {
5082                  mergeFrom(parsedMessage);
5083                }
5084              }
5085              return this;
5086            }
5087            private int bitField0_;
5088    
5089            // repeated .hadoop.hdfs.fsimage.INodeSection.XAttrCompactProto xAttrs = 1;
5090            private java.util.List<org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto> xAttrs_ =
5091              java.util.Collections.emptyList();
5092            private void ensureXAttrsIsMutable() {
5093              if (!((bitField0_ & 0x00000001) == 0x00000001)) {
5094                xAttrs_ = new java.util.ArrayList<org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto>(xAttrs_);
5095                bitField0_ |= 0x00000001;
5096               }
5097            }
5098    
5099            private com.google.protobuf.RepeatedFieldBuilder<
5100                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProtoOrBuilder> xAttrsBuilder_;
5101    
5102            /**
5103             * <code>repeated .hadoop.hdfs.fsimage.INodeSection.XAttrCompactProto xAttrs = 1;</code>
5104             */
5105            public java.util.List<org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto> getXAttrsList() {
5106              if (xAttrsBuilder_ == null) {
5107                return java.util.Collections.unmodifiableList(xAttrs_);
5108              } else {
5109                return xAttrsBuilder_.getMessageList();
5110              }
5111            }
5112            /**
5113             * <code>repeated .hadoop.hdfs.fsimage.INodeSection.XAttrCompactProto xAttrs = 1;</code>
5114             */
5115            public int getXAttrsCount() {
5116              if (xAttrsBuilder_ == null) {
5117                return xAttrs_.size();
5118              } else {
5119                return xAttrsBuilder_.getCount();
5120              }
5121            }
5122            /**
5123             * <code>repeated .hadoop.hdfs.fsimage.INodeSection.XAttrCompactProto xAttrs = 1;</code>
5124             */
5125            public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto getXAttrs(int index) {
5126              if (xAttrsBuilder_ == null) {
5127                return xAttrs_.get(index);
5128              } else {
5129                return xAttrsBuilder_.getMessage(index);
5130              }
5131            }
5132            /**
5133             * <code>repeated .hadoop.hdfs.fsimage.INodeSection.XAttrCompactProto xAttrs = 1;</code>
5134             */
5135            public Builder setXAttrs(
5136                int index, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto value) {
5137              if (xAttrsBuilder_ == null) {
5138                if (value == null) {
5139                  throw new NullPointerException();
5140                }
5141                ensureXAttrsIsMutable();
5142                xAttrs_.set(index, value);
5143                onChanged();
5144              } else {
5145                xAttrsBuilder_.setMessage(index, value);
5146              }
5147              return this;
5148            }
5149            /**
5150             * <code>repeated .hadoop.hdfs.fsimage.INodeSection.XAttrCompactProto xAttrs = 1;</code>
5151             */
5152            public Builder setXAttrs(
5153                int index, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto.Builder builderForValue) {
5154              if (xAttrsBuilder_ == null) {
5155                ensureXAttrsIsMutable();
5156                xAttrs_.set(index, builderForValue.build());
5157                onChanged();
5158              } else {
5159                xAttrsBuilder_.setMessage(index, builderForValue.build());
5160              }
5161              return this;
5162            }
5163            /**
5164             * <code>repeated .hadoop.hdfs.fsimage.INodeSection.XAttrCompactProto xAttrs = 1;</code>
5165             */
5166            public Builder addXAttrs(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto value) {
5167              if (xAttrsBuilder_ == null) {
5168                if (value == null) {
5169                  throw new NullPointerException();
5170                }
5171                ensureXAttrsIsMutable();
5172                xAttrs_.add(value);
5173                onChanged();
5174              } else {
5175                xAttrsBuilder_.addMessage(value);
5176              }
5177              return this;
5178            }
5179            /**
5180             * <code>repeated .hadoop.hdfs.fsimage.INodeSection.XAttrCompactProto xAttrs = 1;</code>
5181             */
5182            public Builder addXAttrs(
5183                int index, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto value) {
5184              if (xAttrsBuilder_ == null) {
5185                if (value == null) {
5186                  throw new NullPointerException();
5187                }
5188                ensureXAttrsIsMutable();
5189                xAttrs_.add(index, value);
5190                onChanged();
5191              } else {
5192                xAttrsBuilder_.addMessage(index, value);
5193              }
5194              return this;
5195            }
5196            /**
5197             * <code>repeated .hadoop.hdfs.fsimage.INodeSection.XAttrCompactProto xAttrs = 1;</code>
5198             */
5199            public Builder addXAttrs(
5200                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto.Builder builderForValue) {
5201              if (xAttrsBuilder_ == null) {
5202                ensureXAttrsIsMutable();
5203                xAttrs_.add(builderForValue.build());
5204                onChanged();
5205              } else {
5206                xAttrsBuilder_.addMessage(builderForValue.build());
5207              }
5208              return this;
5209            }
5210            /**
5211             * <code>repeated .hadoop.hdfs.fsimage.INodeSection.XAttrCompactProto xAttrs = 1;</code>
5212             */
5213            public Builder addXAttrs(
5214                int index, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto.Builder builderForValue) {
5215              if (xAttrsBuilder_ == null) {
5216                ensureXAttrsIsMutable();
5217                xAttrs_.add(index, builderForValue.build());
5218                onChanged();
5219              } else {
5220                xAttrsBuilder_.addMessage(index, builderForValue.build());
5221              }
5222              return this;
5223            }
5224            /**
5225             * <code>repeated .hadoop.hdfs.fsimage.INodeSection.XAttrCompactProto xAttrs = 1;</code>
5226             */
5227            public Builder addAllXAttrs(
5228                java.lang.Iterable<? extends org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto> values) {
5229              if (xAttrsBuilder_ == null) {
5230                ensureXAttrsIsMutable();
5231                super.addAll(values, xAttrs_);
5232                onChanged();
5233              } else {
5234                xAttrsBuilder_.addAllMessages(values);
5235              }
5236              return this;
5237            }
5238            /**
5239             * <code>repeated .hadoop.hdfs.fsimage.INodeSection.XAttrCompactProto xAttrs = 1;</code>
5240             */
5241            public Builder clearXAttrs() {
5242              if (xAttrsBuilder_ == null) {
5243                xAttrs_ = java.util.Collections.emptyList();
5244                bitField0_ = (bitField0_ & ~0x00000001);
5245                onChanged();
5246              } else {
5247                xAttrsBuilder_.clear();
5248              }
5249              return this;
5250            }
5251            /**
5252             * <code>repeated .hadoop.hdfs.fsimage.INodeSection.XAttrCompactProto xAttrs = 1;</code>
5253             */
5254            public Builder removeXAttrs(int index) {
5255              if (xAttrsBuilder_ == null) {
5256                ensureXAttrsIsMutable();
5257                xAttrs_.remove(index);
5258                onChanged();
5259              } else {
5260                xAttrsBuilder_.remove(index);
5261              }
5262              return this;
5263            }
5264            /**
5265             * <code>repeated .hadoop.hdfs.fsimage.INodeSection.XAttrCompactProto xAttrs = 1;</code>
5266             */
5267            public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto.Builder getXAttrsBuilder(
5268                int index) {
5269              return getXAttrsFieldBuilder().getBuilder(index);
5270            }
5271            /**
5272             * <code>repeated .hadoop.hdfs.fsimage.INodeSection.XAttrCompactProto xAttrs = 1;</code>
5273             */
5274            public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProtoOrBuilder getXAttrsOrBuilder(
5275                int index) {
5276              if (xAttrsBuilder_ == null) {
5277                return xAttrs_.get(index);  } else {
5278                return xAttrsBuilder_.getMessageOrBuilder(index);
5279              }
5280            }
5281            /**
5282             * <code>repeated .hadoop.hdfs.fsimage.INodeSection.XAttrCompactProto xAttrs = 1;</code>
5283             */
5284            public java.util.List<? extends org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProtoOrBuilder> 
5285                 getXAttrsOrBuilderList() {
5286              if (xAttrsBuilder_ != null) {
5287                return xAttrsBuilder_.getMessageOrBuilderList();
5288              } else {
5289                return java.util.Collections.unmodifiableList(xAttrs_);
5290              }
5291            }
5292            /**
5293             * <code>repeated .hadoop.hdfs.fsimage.INodeSection.XAttrCompactProto xAttrs = 1;</code>
5294             */
5295            public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto.Builder addXAttrsBuilder() {
5296              return getXAttrsFieldBuilder().addBuilder(
5297                  org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto.getDefaultInstance());
5298            }
5299            /**
5300             * <code>repeated .hadoop.hdfs.fsimage.INodeSection.XAttrCompactProto xAttrs = 1;</code>
5301             */
5302            public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto.Builder addXAttrsBuilder(
5303                int index) {
5304              return getXAttrsFieldBuilder().addBuilder(
5305                  index, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto.getDefaultInstance());
5306            }
5307            /**
5308             * <code>repeated .hadoop.hdfs.fsimage.INodeSection.XAttrCompactProto xAttrs = 1;</code>
5309             */
5310            public java.util.List<org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto.Builder> 
5311                 getXAttrsBuilderList() {
5312              return getXAttrsFieldBuilder().getBuilderList();
5313            }
5314            private com.google.protobuf.RepeatedFieldBuilder<
5315                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProtoOrBuilder> 
5316                getXAttrsFieldBuilder() {
5317              if (xAttrsBuilder_ == null) {
5318                xAttrsBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
5319                    org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProtoOrBuilder>(
5320                        xAttrs_,
5321                        ((bitField0_ & 0x00000001) == 0x00000001),
5322                        getParentForChildren(),
5323                        isClean());
5324                xAttrs_ = null;
5325              }
5326              return xAttrsBuilder_;
5327            }
5328    
5329            // @@protoc_insertion_point(builder_scope:hadoop.hdfs.fsimage.INodeSection.XAttrFeatureProto)
5330          }
5331    
      // Eagerly builds and initializes the singleton default instance returned
      // by getDefaultInstance().
      static {
        defaultInstance = new XAttrFeatureProto(true);
        defaultInstance.initFields();
      }
5336    
5337          // @@protoc_insertion_point(class_scope:hadoop.hdfs.fsimage.INodeSection.XAttrFeatureProto)
5338        }
5339    
    /**
     * Accessor interface for {@code hadoop.hdfs.fsimage.INodeSection.INodeFile},
     * implemented by both the generated message and its Builder.
     */
    public interface INodeFileOrBuilder
        extends com.google.protobuf.MessageOrBuilder {

      // optional uint32 replication = 1;
      /**
       * <code>optional uint32 replication = 1;</code>
       */
      boolean hasReplication();
      /**
       * <code>optional uint32 replication = 1;</code>
       */
      int getReplication();

      // optional uint64 modificationTime = 2;
      /**
       * <code>optional uint64 modificationTime = 2;</code>
       */
      boolean hasModificationTime();
      /**
       * <code>optional uint64 modificationTime = 2;</code>
       */
      long getModificationTime();

      // optional uint64 accessTime = 3;
      /**
       * <code>optional uint64 accessTime = 3;</code>
       */
      boolean hasAccessTime();
      /**
       * <code>optional uint64 accessTime = 3;</code>
       */
      long getAccessTime();

      // optional uint64 preferredBlockSize = 4;
      /**
       * <code>optional uint64 preferredBlockSize = 4;</code>
       */
      boolean hasPreferredBlockSize();
      /**
       * <code>optional uint64 preferredBlockSize = 4;</code>
       */
      long getPreferredBlockSize();

      // optional fixed64 permission = 5;
      /**
       * <code>optional fixed64 permission = 5;</code>
       */
      boolean hasPermission();
      /**
       * <code>optional fixed64 permission = 5;</code>
       */
      long getPermission();

      // repeated .hadoop.hdfs.BlockProto blocks = 6;
      /**
       * <code>repeated .hadoop.hdfs.BlockProto blocks = 6;</code>
       */
      java.util.List<org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto> 
          getBlocksList();
      /**
       * <code>repeated .hadoop.hdfs.BlockProto blocks = 6;</code>
       */
      org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto getBlocks(int index);
      /**
       * <code>repeated .hadoop.hdfs.BlockProto blocks = 6;</code>
       */
      int getBlocksCount();
      /**
       * <code>repeated .hadoop.hdfs.BlockProto blocks = 6;</code>
       */
      java.util.List<? extends org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProtoOrBuilder> 
          getBlocksOrBuilderList();
      /**
       * <code>repeated .hadoop.hdfs.BlockProto blocks = 6;</code>
       */
      org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProtoOrBuilder getBlocksOrBuilder(
          int index);

      // optional .hadoop.hdfs.fsimage.INodeSection.FileUnderConstructionFeature fileUC = 7;
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.FileUnderConstructionFeature fileUC = 7;</code>
       */
      boolean hasFileUC();
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.FileUnderConstructionFeature fileUC = 7;</code>
       */
      org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature getFileUC();
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.FileUnderConstructionFeature fileUC = 7;</code>
       */
      org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeatureOrBuilder getFileUCOrBuilder();

      // optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 8;
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 8;</code>
       */
      boolean hasAcl();
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 8;</code>
       */
      org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto getAcl();
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 8;</code>
       */
      org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProtoOrBuilder getAclOrBuilder();

      // optional .hadoop.hdfs.fsimage.INodeSection.XAttrFeatureProto xAttrs = 9;
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.XAttrFeatureProto xAttrs = 9;</code>
       */
      boolean hasXAttrs();
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.XAttrFeatureProto xAttrs = 9;</code>
       */
      org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto getXAttrs();
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.XAttrFeatureProto xAttrs = 9;</code>
       */
      org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProtoOrBuilder getXAttrsOrBuilder();

      // optional uint32 storagePolicyID = 10;
      /**
       * <code>optional uint32 storagePolicyID = 10;</code>
       */
      boolean hasStoragePolicyID();
      /**
       * <code>optional uint32 storagePolicyID = 10;</code>
       */
      int getStoragePolicyID();
    }
5470        /**
5471         * Protobuf type {@code hadoop.hdfs.fsimage.INodeSection.INodeFile}
5472         */
5473        public static final class INodeFile extends
5474            com.google.protobuf.GeneratedMessage
5475            implements INodeFileOrBuilder {
      // Use INodeFile.newBuilder() to construct.
      private INodeFile(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
        super(builder);
        this.unknownFields = builder.getUnknownFields();
      }
      // No-init constructor; installs an empty unknown-field set (used when
      // constructing the shared default instance).
      private INodeFile(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
5482    
      // Shared default instance; presumably assigned in a static initializer
      // later in this class (not visible in this chunk) — confirm against the
      // full generated file.
      private static final INodeFile defaultInstance;
      /** Returns the shared default instance of this message type. */
      public static INodeFile getDefaultInstance() {
        return defaultInstance;
      }

      /** Same as {@link #getDefaultInstance()}; required by the Message contract. */
      public INodeFile getDefaultInstanceForType() {
        return defaultInstance;
      }
5491    
      // Fields not recognized by this generated class, captured during parsing.
      private final com.google.protobuf.UnknownFieldSet unknownFields;
      @java.lang.Override
      public final com.google.protobuf.UnknownFieldSet
          getUnknownFields() {
        return this.unknownFields;
      }
      /**
       * Parses an INodeFile from the wire format. Reads tags in a loop and
       * dispatches on the tag value; unrecognized tags fall through to the
       * unknown-field set. Note the {@code default} arm is emitted before the
       * numbered cases — legal Java, as case ordering does not affect dispatch.
       */
      private INodeFile(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        initFields();
        int mutable_bitField0_ = 0;
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
            com.google.protobuf.UnknownFieldSet.newBuilder();
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                // Tag 0 signals end of stream / end of group.
                done = true;
                break;
              default: {
                if (!parseUnknownField(input, unknownFields,
                                       extensionRegistry, tag)) {
                  done = true;
                }
                break;
              }
              case 8: {
                // field 1 (replication), varint.
                bitField0_ |= 0x00000001;
                replication_ = input.readUInt32();
                break;
              }
              case 16: {
                // field 2 (modificationTime), varint.
                bitField0_ |= 0x00000002;
                modificationTime_ = input.readUInt64();
                break;
              }
              case 24: {
                // field 3 (accessTime), varint.
                bitField0_ |= 0x00000004;
                accessTime_ = input.readUInt64();
                break;
              }
              case 32: {
                // field 4 (preferredBlockSize), varint.
                bitField0_ |= 0x00000008;
                preferredBlockSize_ = input.readUInt64();
                break;
              }
              case 41: {
                // field 5 (permission), fixed64.
                bitField0_ |= 0x00000010;
                permission_ = input.readFixed64();
                break;
              }
              case 50: {
                // field 6 (blocks), repeated message: allocate the list on the
                // first occurrence, tracked via mutable_bitField0_.
                if (!((mutable_bitField0_ & 0x00000020) == 0x00000020)) {
                  blocks_ = new java.util.ArrayList<org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto>();
                  mutable_bitField0_ |= 0x00000020;
                }
                blocks_.add(input.readMessage(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto.PARSER, extensionRegistry));
                break;
              }
              case 58: {
                // field 7 (fileUC), message: if seen before, merge into the
                // previous value rather than overwrite.
                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature.Builder subBuilder = null;
                if (((bitField0_ & 0x00000020) == 0x00000020)) {
                  subBuilder = fileUC_.toBuilder();
                }
                fileUC_ = input.readMessage(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature.PARSER, extensionRegistry);
                if (subBuilder != null) {
                  subBuilder.mergeFrom(fileUC_);
                  fileUC_ = subBuilder.buildPartial();
                }
                bitField0_ |= 0x00000020;
                break;
              }
              case 66: {
                // field 8 (acl), message: merge-on-repeat, as above.
                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto.Builder subBuilder = null;
                if (((bitField0_ & 0x00000040) == 0x00000040)) {
                  subBuilder = acl_.toBuilder();
                }
                acl_ = input.readMessage(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto.PARSER, extensionRegistry);
                if (subBuilder != null) {
                  subBuilder.mergeFrom(acl_);
                  acl_ = subBuilder.buildPartial();
                }
                bitField0_ |= 0x00000040;
                break;
              }
              case 74: {
                // field 9 (xAttrs), message: merge-on-repeat, as above.
                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto.Builder subBuilder = null;
                if (((bitField0_ & 0x00000080) == 0x00000080)) {
                  subBuilder = xAttrs_.toBuilder();
                }
                xAttrs_ = input.readMessage(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto.PARSER, extensionRegistry);
                if (subBuilder != null) {
                  subBuilder.mergeFrom(xAttrs_);
                  xAttrs_ = subBuilder.buildPartial();
                }
                bitField0_ |= 0x00000080;
                break;
              }
              case 80: {
                // field 10 (storagePolicyID), varint.
                bitField0_ |= 0x00000100;
                storagePolicyID_ = input.readUInt32();
                break;
              }
            }
          }
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(this);
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(
              e.getMessage()).setUnfinishedMessage(this);
        } finally {
          // Freeze the repeated blocks list and the unknown-field set even on
          // failure, so the partially-built message is safe to expose.
          if (((mutable_bitField0_ & 0x00000020) == 0x00000020)) {
            blocks_ = java.util.Collections.unmodifiableList(blocks_);
          }
          this.unknownFields = unknownFields.build();
          makeExtensionsImmutable();
        }
      }
      /** Returns the protobuf descriptor describing the INodeFile message type. */
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_INodeFile_descriptor;
      }
5617    
      /** Wires the descriptor's fields to this class's reflective accessors. */
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_INodeFile_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile.Builder.class);
      }
5624    
      /**
       * Parser for INodeFile messages; delegates to the private parsing
       * constructor. Non-final by protobuf 2.5 generator convention.
       */
      public static com.google.protobuf.Parser<INodeFile> PARSER =
          new com.google.protobuf.AbstractParser<INodeFile>() {
        public INodeFile parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          return new INodeFile(input, extensionRegistry);
        }
      };
5634    
      /** Returns the shared {@link #PARSER} instance for this message type. */
      @java.lang.Override
      public com.google.protobuf.Parser<INodeFile> getParserForType() {
        return PARSER;
      }
5639    
      // Presence bitmap: each optional field owns one bit; a set bit means
      // the field was explicitly present on the wire (see hasXxx() methods).
      private int bitField0_;
      // optional uint32 replication = 1;
      public static final int REPLICATION_FIELD_NUMBER = 1;
      private int replication_;
      /**
       * <code>optional uint32 replication = 1;</code>
       */
      public boolean hasReplication() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>optional uint32 replication = 1;</code>
       */
      public int getReplication() {
        return replication_;
      }
5656    
      // optional uint64 modificationTime = 2;  (presence bit 0x00000002)
      public static final int MODIFICATIONTIME_FIELD_NUMBER = 2;
      private long modificationTime_;
      /**
       * <code>optional uint64 modificationTime = 2;</code>
       */
      public boolean hasModificationTime() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>optional uint64 modificationTime = 2;</code>
       */
      public long getModificationTime() {
        return modificationTime_;
      }
5672    
      // optional uint64 accessTime = 3;  (presence bit 0x00000004)
      public static final int ACCESSTIME_FIELD_NUMBER = 3;
      private long accessTime_;
      /**
       * <code>optional uint64 accessTime = 3;</code>
       */
      public boolean hasAccessTime() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      /**
       * <code>optional uint64 accessTime = 3;</code>
       */
      public long getAccessTime() {
        return accessTime_;
      }
5688    
      // optional uint64 preferredBlockSize = 4;  (presence bit 0x00000008)
      public static final int PREFERREDBLOCKSIZE_FIELD_NUMBER = 4;
      private long preferredBlockSize_;
      /**
       * <code>optional uint64 preferredBlockSize = 4;</code>
       */
      public boolean hasPreferredBlockSize() {
        return ((bitField0_ & 0x00000008) == 0x00000008);
      }
      /**
       * <code>optional uint64 preferredBlockSize = 4;</code>
       */
      public long getPreferredBlockSize() {
        return preferredBlockSize_;
      }
5704    
      // optional fixed64 permission = 5;  (presence bit 0x00000010)
      public static final int PERMISSION_FIELD_NUMBER = 5;
      private long permission_;
      /**
       * <code>optional fixed64 permission = 5;</code>
       */
      public boolean hasPermission() {
        return ((bitField0_ & 0x00000010) == 0x00000010);
      }
      /**
       * <code>optional fixed64 permission = 5;</code>
       */
      public long getPermission() {
        return permission_;
      }
5720    
      // repeated .hadoop.hdfs.BlockProto blocks = 6;
      // Repeated fields have no presence bit; the list is made unmodifiable
      // once parsing completes (see the constructor's finally block).
      public static final int BLOCKS_FIELD_NUMBER = 6;
      private java.util.List<org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto> blocks_;
      /**
       * <code>repeated .hadoop.hdfs.BlockProto blocks = 6;</code>
       */
      public java.util.List<org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto> getBlocksList() {
        return blocks_;
      }
      /**
       * <code>repeated .hadoop.hdfs.BlockProto blocks = 6;</code>
       */
      public java.util.List<? extends org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProtoOrBuilder> 
          getBlocksOrBuilderList() {
        return blocks_;
      }
      /**
       * <code>repeated .hadoop.hdfs.BlockProto blocks = 6;</code>
       */
      public int getBlocksCount() {
        return blocks_.size();
      }
      /**
       * <code>repeated .hadoop.hdfs.BlockProto blocks = 6;</code>
       */
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto getBlocks(int index) {
        return blocks_.get(index);
      }
      /**
       * <code>repeated .hadoop.hdfs.BlockProto blocks = 6;</code>
       */
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProtoOrBuilder getBlocksOrBuilder(
          int index) {
        return blocks_.get(index);
      }
5756    
      // optional .hadoop.hdfs.fsimage.INodeSection.FileUnderConstructionFeature fileUC = 7;
      // (presence bit 0x00000020; getter returns the default instance when unset,
      // as initialized by initFields())
      public static final int FILEUC_FIELD_NUMBER = 7;
      private org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature fileUC_;
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.FileUnderConstructionFeature fileUC = 7;</code>
       */
      public boolean hasFileUC() {
        return ((bitField0_ & 0x00000020) == 0x00000020);
      }
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.FileUnderConstructionFeature fileUC = 7;</code>
       */
      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature getFileUC() {
        return fileUC_;
      }
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.FileUnderConstructionFeature fileUC = 7;</code>
       */
      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeatureOrBuilder getFileUCOrBuilder() {
        return fileUC_;
      }
5778    
      // optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 8;
      // (presence bit 0x00000040; default instance when unset)
      public static final int ACL_FIELD_NUMBER = 8;
      private org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto acl_;
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 8;</code>
       */
      public boolean hasAcl() {
        return ((bitField0_ & 0x00000040) == 0x00000040);
      }
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 8;</code>
       */
      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto getAcl() {
        return acl_;
      }
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 8;</code>
       */
      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProtoOrBuilder getAclOrBuilder() {
        return acl_;
      }
5800    
      // optional .hadoop.hdfs.fsimage.INodeSection.XAttrFeatureProto xAttrs = 9;
      // (presence bit 0x00000080; default instance when unset)
      public static final int XATTRS_FIELD_NUMBER = 9;
      private org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto xAttrs_;
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.XAttrFeatureProto xAttrs = 9;</code>
       */
      public boolean hasXAttrs() {
        return ((bitField0_ & 0x00000080) == 0x00000080);
      }
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.XAttrFeatureProto xAttrs = 9;</code>
       */
      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto getXAttrs() {
        return xAttrs_;
      }
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.XAttrFeatureProto xAttrs = 9;</code>
       */
      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProtoOrBuilder getXAttrsOrBuilder() {
        return xAttrs_;
      }
5822    
      // optional uint32 storagePolicyID = 10;  (presence bit 0x00000100)
      public static final int STORAGEPOLICYID_FIELD_NUMBER = 10;
      private int storagePolicyID_;
      /**
       * <code>optional uint32 storagePolicyID = 10;</code>
       */
      public boolean hasStoragePolicyID() {
        return ((bitField0_ & 0x00000100) == 0x00000100);
      }
      /**
       * <code>optional uint32 storagePolicyID = 10;</code>
       */
      public int getStoragePolicyID() {
        return storagePolicyID_;
      }
5838    
      /**
       * Resets every field to its proto default: numeric zeros, an empty
       * immutable list for the repeated field, and the shared default
       * instances for nested message fields.
       */
      private void initFields() {
        replication_ = 0;
        modificationTime_ = 0L;
        accessTime_ = 0L;
        preferredBlockSize_ = 0L;
        permission_ = 0L;
        blocks_ = java.util.Collections.emptyList();
        fileUC_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature.getDefaultInstance();
        acl_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto.getDefaultInstance();
        xAttrs_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto.getDefaultInstance();
        storagePolicyID_ = 0;
      }
      // Memoized initialization state: -1 = not yet computed, 0 = false, 1 = true.
      private byte memoizedIsInitialized = -1;
      /**
       * True when all required fields of this message and its sub-messages are
       * set. INodeFile itself has no required fields, so only the nested
       * blocks and xAttrs messages are checked. Result is cached.
       */
      public final boolean isInitialized() {
        byte isInitialized = memoizedIsInitialized;
        if (isInitialized != -1) return isInitialized == 1;

        for (int i = 0; i < getBlocksCount(); i++) {
          if (!getBlocks(i).isInitialized()) {
            memoizedIsInitialized = 0;
            return false;
          }
        }
        if (hasXAttrs()) {
          if (!getXAttrs().isInitialized()) {
            memoizedIsInitialized = 0;
            return false;
          }
        }
        memoizedIsInitialized = 1;
        return true;
      }
5871    
      /**
       * Serializes this message to {@code output} in field-number order,
       * emitting only fields whose presence bit is set (plus all repeated
       * blocks entries and any unknown fields preserved from parsing).
       */
      public void writeTo(com.google.protobuf.CodedOutputStream output)
                          throws java.io.IOException {
        // Memoizes nested-message sizes so length-delimited writes are cheap.
        getSerializedSize();
        if (((bitField0_ & 0x00000001) == 0x00000001)) {
          output.writeUInt32(1, replication_);
        }
        if (((bitField0_ & 0x00000002) == 0x00000002)) {
          output.writeUInt64(2, modificationTime_);
        }
        if (((bitField0_ & 0x00000004) == 0x00000004)) {
          output.writeUInt64(3, accessTime_);
        }
        if (((bitField0_ & 0x00000008) == 0x00000008)) {
          output.writeUInt64(4, preferredBlockSize_);
        }
        if (((bitField0_ & 0x00000010) == 0x00000010)) {
          output.writeFixed64(5, permission_);
        }
        for (int i = 0; i < blocks_.size(); i++) {
          output.writeMessage(6, blocks_.get(i));
        }
        if (((bitField0_ & 0x00000020) == 0x00000020)) {
          output.writeMessage(7, fileUC_);
        }
        if (((bitField0_ & 0x00000040) == 0x00000040)) {
          output.writeMessage(8, acl_);
        }
        if (((bitField0_ & 0x00000080) == 0x00000080)) {
          output.writeMessage(9, xAttrs_);
        }
        if (((bitField0_ & 0x00000100) == 0x00000100)) {
          output.writeUInt32(10, storagePolicyID_);
        }
        getUnknownFields().writeTo(output);
      }
5907    
      // Cached wire size; -1 means "not yet computed".
      private int memoizedSerializedSize = -1;
      /**
       * Computes (and caches) the exact number of bytes {@link #writeTo}
       * will emit, summing sizes of set fields, repeated entries, and
       * unknown fields. Mirrors the field order in writeTo.
       */
      public int getSerializedSize() {
        int size = memoizedSerializedSize;
        if (size != -1) return size;

        size = 0;
        if (((bitField0_ & 0x00000001) == 0x00000001)) {
          size += com.google.protobuf.CodedOutputStream
            .computeUInt32Size(1, replication_);
        }
        if (((bitField0_ & 0x00000002) == 0x00000002)) {
          size += com.google.protobuf.CodedOutputStream
            .computeUInt64Size(2, modificationTime_);
        }
        if (((bitField0_ & 0x00000004) == 0x00000004)) {
          size += com.google.protobuf.CodedOutputStream
            .computeUInt64Size(3, accessTime_);
        }
        if (((bitField0_ & 0x00000008) == 0x00000008)) {
          size += com.google.protobuf.CodedOutputStream
            .computeUInt64Size(4, preferredBlockSize_);
        }
        if (((bitField0_ & 0x00000010) == 0x00000010)) {
          size += com.google.protobuf.CodedOutputStream
            .computeFixed64Size(5, permission_);
        }
        for (int i = 0; i < blocks_.size(); i++) {
          size += com.google.protobuf.CodedOutputStream
            .computeMessageSize(6, blocks_.get(i));
        }
        if (((bitField0_ & 0x00000020) == 0x00000020)) {
          size += com.google.protobuf.CodedOutputStream
            .computeMessageSize(7, fileUC_);
        }
        if (((bitField0_ & 0x00000040) == 0x00000040)) {
          size += com.google.protobuf.CodedOutputStream
            .computeMessageSize(8, acl_);
        }
        if (((bitField0_ & 0x00000080) == 0x00000080)) {
          size += com.google.protobuf.CodedOutputStream
            .computeMessageSize(9, xAttrs_);
        }
        if (((bitField0_ & 0x00000100) == 0x00000100)) {
          size += com.google.protobuf.CodedOutputStream
            .computeUInt32Size(10, storagePolicyID_);
        }
        size += getUnknownFields().getSerializedSize();
        memoizedSerializedSize = size;
        return size;
      }
5958    
      private static final long serialVersionUID = 0L;
      /** Java-serialization hook; delegates to the GeneratedMessage proxy. */
      @java.lang.Override
      protected java.lang.Object writeReplace()
          throws java.io.ObjectStreamException {
        return super.writeReplace();
      }
5965    
      // Static parse entry points for every supported input source
      // (ByteString, byte[], InputStream, CodedInputStream), with and without
      // an extension registry. All delegate to PARSER; the delimited variants
      // read a varint length prefix first.
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile parseFrom(
          com.google.protobuf.ByteString data)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile parseFrom(
          com.google.protobuf.ByteString data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile parseFrom(byte[] data)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile parseFrom(
          byte[] data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile parseFrom(java.io.InputStream input)
          throws java.io.IOException {
        return PARSER.parseFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile parseFrom(
          java.io.InputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseFrom(input, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile parseDelimitedFrom(java.io.InputStream input)
          throws java.io.IOException {
        return PARSER.parseDelimitedFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile parseDelimitedFrom(
          java.io.InputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseDelimitedFrom(input, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile parseFrom(
          com.google.protobuf.CodedInputStream input)
          throws java.io.IOException {
        return PARSER.parseFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile parseFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseFrom(input, extensionRegistry);
      }
6018    
      /** Creates an empty builder for this message type. */
      public static Builder newBuilder() { return Builder.create(); }
      public Builder newBuilderForType() { return newBuilder(); }
      /** Creates a builder pre-populated with {@code prototype}'s fields. */
      public static Builder newBuilder(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile prototype) {
        return newBuilder().mergeFrom(prototype);
      }
      public Builder toBuilder() { return newBuilder(this); }
6025    
      /** Creates a builder attached to {@code parent} for change notification. */
      @java.lang.Override
      protected Builder newBuilderForType(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        Builder builder = new Builder(parent);
        return builder;
      }
6032          /**
6033           * Protobuf type {@code hadoop.hdfs.fsimage.INodeSection.INodeFile}
6034           */
6035          public static final class Builder extends
6036              com.google.protobuf.GeneratedMessage.Builder<Builder>
6037             implements org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFileOrBuilder {
        /** Returns the descriptor for the INodeFile message this builder builds. */
        public static final com.google.protobuf.Descriptors.Descriptor
            getDescriptor() {
          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_INodeFile_descriptor;
        }
6042    
        /** Wires the descriptor's fields to this builder's reflective accessors. */
        protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
            internalGetFieldAccessorTable() {
          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_INodeFile_fieldAccessorTable
              .ensureFieldAccessorsInitialized(
                  org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile.Builder.class);
        }
6049    
        // Construct using org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile.newBuilder()
        private Builder() {
          maybeForceBuilderInitialization();
        }

        // Parent-attached variant used by newBuilderForType(BuilderParent)
        // so nested-builder changes propagate upward.
        private Builder(
            com.google.protobuf.GeneratedMessage.BuilderParent parent) {
          super(parent);
          maybeForceBuilderInitialization();
        }
        /**
         * Eagerly creates the nested-field builders when the runtime is
         * configured to always use field builders (descriptor-based mode).
         */
        private void maybeForceBuilderInitialization() {
          if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
            getBlocksFieldBuilder();
            getFileUCFieldBuilder();
            getAclFieldBuilder();
            getXAttrsFieldBuilder();
          }
        }
        /** Factory used by INodeFile.newBuilder(). */
        private static Builder create() {
          return new Builder();
        }
6071    
        /**
         * Resets every field to its proto default and clears all presence
         * bits. Note the builder's bit layout differs from the message's:
         * the repeated blocks field occupies bit 0x20 here, shifting the
         * message fields fileUC/acl/xAttrs/storagePolicyID to 0x40..0x200.
         */
        public Builder clear() {
          super.clear();
          replication_ = 0;
          bitField0_ = (bitField0_ & ~0x00000001);
          modificationTime_ = 0L;
          bitField0_ = (bitField0_ & ~0x00000002);
          accessTime_ = 0L;
          bitField0_ = (bitField0_ & ~0x00000004);
          preferredBlockSize_ = 0L;
          bitField0_ = (bitField0_ & ~0x00000008);
          permission_ = 0L;
          bitField0_ = (bitField0_ & ~0x00000010);
          if (blocksBuilder_ == null) {
            blocks_ = java.util.Collections.emptyList();
            bitField0_ = (bitField0_ & ~0x00000020);
          } else {
            blocksBuilder_.clear();
          }
          if (fileUCBuilder_ == null) {
            fileUC_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature.getDefaultInstance();
          } else {
            fileUCBuilder_.clear();
          }
          bitField0_ = (bitField0_ & ~0x00000040);
          if (aclBuilder_ == null) {
            acl_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto.getDefaultInstance();
          } else {
            aclBuilder_.clear();
          }
          bitField0_ = (bitField0_ & ~0x00000080);
          if (xAttrsBuilder_ == null) {
            xAttrs_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto.getDefaultInstance();
          } else {
            xAttrsBuilder_.clear();
          }
          bitField0_ = (bitField0_ & ~0x00000100);
          storagePolicyID_ = 0;
          bitField0_ = (bitField0_ & ~0x00000200);
          return this;
        }
6112    
        /** Deep copy: snapshots current state via buildPartial into a new builder. */
        public Builder clone() {
          return create().mergeFrom(buildPartial());
        }
6116    
        /** Descriptor of the message type this builder produces. */
        public com.google.protobuf.Descriptors.Descriptor
            getDescriptorForType() {
          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_INodeFile_descriptor;
        }

        /** Shared immutable default instance of INodeFile. */
        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile getDefaultInstanceForType() {
          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile.getDefaultInstance();
        }
6125    
        /**
         * Builds the message, throwing UninitializedMessageException if any
         * required sub-field is missing; use buildPartial() to skip the check.
         */
        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile build() {
          org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile result = buildPartial();
          if (!result.isInitialized()) {
            throw newUninitializedMessageException(result);
          }
          return result;
        }
6133    
        /**
         * Builds the message without an initialization check, copying each
         * field and remapping builder presence bits to message presence bits.
         * Builder bits 0x40/0x80/0x100/0x200 map to message bits
         * 0x20/0x40/0x80/0x100 because the repeated blocks field consumes
         * builder bit 0x20 but has no presence bit in the message. The
         * blocks list is frozen as unmodifiable when no field builder exists.
         */
        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile buildPartial() {
          org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile result = new org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile(this);
          int from_bitField0_ = bitField0_;
          int to_bitField0_ = 0;
          if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
            to_bitField0_ |= 0x00000001;
          }
          result.replication_ = replication_;
          if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
            to_bitField0_ |= 0x00000002;
          }
          result.modificationTime_ = modificationTime_;
          if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
            to_bitField0_ |= 0x00000004;
          }
          result.accessTime_ = accessTime_;
          if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
            to_bitField0_ |= 0x00000008;
          }
          result.preferredBlockSize_ = preferredBlockSize_;
          if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
            to_bitField0_ |= 0x00000010;
          }
          result.permission_ = permission_;
          if (blocksBuilder_ == null) {
            if (((bitField0_ & 0x00000020) == 0x00000020)) {
              blocks_ = java.util.Collections.unmodifiableList(blocks_);
              bitField0_ = (bitField0_ & ~0x00000020);
            }
            result.blocks_ = blocks_;
          } else {
            result.blocks_ = blocksBuilder_.build();
          }
          if (((from_bitField0_ & 0x00000040) == 0x00000040)) {
            to_bitField0_ |= 0x00000020;
          }
          if (fileUCBuilder_ == null) {
            result.fileUC_ = fileUC_;
          } else {
            result.fileUC_ = fileUCBuilder_.build();
          }
          if (((from_bitField0_ & 0x00000080) == 0x00000080)) {
            to_bitField0_ |= 0x00000040;
          }
          if (aclBuilder_ == null) {
            result.acl_ = acl_;
          } else {
            result.acl_ = aclBuilder_.build();
          }
          if (((from_bitField0_ & 0x00000100) == 0x00000100)) {
            to_bitField0_ |= 0x00000080;
          }
          if (xAttrsBuilder_ == null) {
            result.xAttrs_ = xAttrs_;
          } else {
            result.xAttrs_ = xAttrsBuilder_.build();
          }
          if (((from_bitField0_ & 0x00000200) == 0x00000200)) {
            to_bitField0_ |= 0x00000100;
          }
          result.storagePolicyID_ = storagePolicyID_;
          result.bitField0_ = to_bitField0_;
          onBuilt();
          return result;
        }
6199    
        /**
         * Generic merge: dispatches to the typed overload when {@code other}
         * is an INodeFile, otherwise merges reflectively via the superclass.
         */
        public Builder mergeFrom(com.google.protobuf.Message other) {
          if (other instanceof org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile) {
            return mergeFrom((org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile)other);
          } else {
            super.mergeFrom(other);
            return this;
          }
        }
6208    
        /**
         * Merges {@code other} into this builder: set scalar fields
         * overwrite, repeated blocks are appended, and set message fields
         * are recursively merged. Unknown fields are merged too. Merging
         * the default instance is a no-op.
         */
        public Builder mergeFrom(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile other) {
          if (other == org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile.getDefaultInstance()) return this;
          if (other.hasReplication()) {
            setReplication(other.getReplication());
          }
          if (other.hasModificationTime()) {
            setModificationTime(other.getModificationTime());
          }
          if (other.hasAccessTime()) {
            setAccessTime(other.getAccessTime());
          }
          if (other.hasPreferredBlockSize()) {
            setPreferredBlockSize(other.getPreferredBlockSize());
          }
          if (other.hasPermission()) {
            setPermission(other.getPermission());
          }
          if (blocksBuilder_ == null) {
            // List mode: adopt other's (immutable) list when ours is empty,
            // otherwise copy-on-write and append.
            if (!other.blocks_.isEmpty()) {
              if (blocks_.isEmpty()) {
                blocks_ = other.blocks_;
                bitField0_ = (bitField0_ & ~0x00000020);
              } else {
                ensureBlocksIsMutable();
                blocks_.addAll(other.blocks_);
              }
              onChanged();
            }
          } else {
            // Builder mode: collapse an empty field builder back to list
            // mode to share other's list; otherwise append via the builder.
            if (!other.blocks_.isEmpty()) {
              if (blocksBuilder_.isEmpty()) {
                blocksBuilder_.dispose();
                blocksBuilder_ = null;
                blocks_ = other.blocks_;
                bitField0_ = (bitField0_ & ~0x00000020);
                blocksBuilder_ = 
                  com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
                     getBlocksFieldBuilder() : null;
              } else {
                blocksBuilder_.addAllMessages(other.blocks_);
              }
            }
          }
          if (other.hasFileUC()) {
            mergeFileUC(other.getFileUC());
          }
          if (other.hasAcl()) {
            mergeAcl(other.getAcl());
          }
          if (other.hasXAttrs()) {
            mergeXAttrs(other.getXAttrs());
          }
          if (other.hasStoragePolicyID()) {
            setStoragePolicyID(other.getStoragePolicyID());
          }
          this.mergeUnknownFields(other.getUnknownFields());
          return this;
        }
6267    
6268            public final boolean isInitialized() {
6269              for (int i = 0; i < getBlocksCount(); i++) {
6270                if (!getBlocks(i).isInitialized()) {
6271                  
6272                  return false;
6273                }
6274              }
6275              if (hasXAttrs()) {
6276                if (!getXAttrs().isInitialized()) {
6277                  
6278                  return false;
6279                }
6280              }
6281              return true;
6282            }
6283    
        /**
         * Parses a serialized INodeFile from {@code input} and merges it into
         * this builder.
         *
         * <p>If parsing fails part-way through, whatever fields were decoded
         * before the failure are still merged in (via the finally block)
         * before the exception is rethrown — standard protobuf builder
         * semantics.
         *
         * @param input wire-format stream to read from
         * @param extensionRegistry registry used to resolve extensions
         * @return this builder, for call chaining
         * @throws java.io.IOException if the stream is malformed or unreadable
         */
        public Builder mergeFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws java.io.IOException {
          org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile parsedMessage = null;
          try {
            parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            // Keep the partially decoded message so it can be merged below.
            parsedMessage = (org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile) e.getUnfinishedMessage();
            throw e;
          } finally {
            if (parsedMessage != null) {
              mergeFrom(parsedMessage);
            }
          }
          return this;
        }
6301            private int bitField0_;
6302    
6303            // optional uint32 replication = 1;
6304            private int replication_ ;
6305            /**
6306             * <code>optional uint32 replication = 1;</code>
6307             */
6308            public boolean hasReplication() {
6309              return ((bitField0_ & 0x00000001) == 0x00000001);
6310            }
6311            /**
6312             * <code>optional uint32 replication = 1;</code>
6313             */
6314            public int getReplication() {
6315              return replication_;
6316            }
6317            /**
6318             * <code>optional uint32 replication = 1;</code>
6319             */
6320            public Builder setReplication(int value) {
6321              bitField0_ |= 0x00000001;
6322              replication_ = value;
6323              onChanged();
6324              return this;
6325            }
6326            /**
6327             * <code>optional uint32 replication = 1;</code>
6328             */
6329            public Builder clearReplication() {
6330              bitField0_ = (bitField0_ & ~0x00000001);
6331              replication_ = 0;
6332              onChanged();
6333              return this;
6334            }
6335    
6336            // optional uint64 modificationTime = 2;
6337            private long modificationTime_ ;
6338            /**
6339             * <code>optional uint64 modificationTime = 2;</code>
6340             */
6341            public boolean hasModificationTime() {
6342              return ((bitField0_ & 0x00000002) == 0x00000002);
6343            }
6344            /**
6345             * <code>optional uint64 modificationTime = 2;</code>
6346             */
6347            public long getModificationTime() {
6348              return modificationTime_;
6349            }
6350            /**
6351             * <code>optional uint64 modificationTime = 2;</code>
6352             */
6353            public Builder setModificationTime(long value) {
6354              bitField0_ |= 0x00000002;
6355              modificationTime_ = value;
6356              onChanged();
6357              return this;
6358            }
6359            /**
6360             * <code>optional uint64 modificationTime = 2;</code>
6361             */
6362            public Builder clearModificationTime() {
6363              bitField0_ = (bitField0_ & ~0x00000002);
6364              modificationTime_ = 0L;
6365              onChanged();
6366              return this;
6367            }
6368    
6369            // optional uint64 accessTime = 3;
6370            private long accessTime_ ;
6371            /**
6372             * <code>optional uint64 accessTime = 3;</code>
6373             */
6374            public boolean hasAccessTime() {
6375              return ((bitField0_ & 0x00000004) == 0x00000004);
6376            }
6377            /**
6378             * <code>optional uint64 accessTime = 3;</code>
6379             */
6380            public long getAccessTime() {
6381              return accessTime_;
6382            }
6383            /**
6384             * <code>optional uint64 accessTime = 3;</code>
6385             */
6386            public Builder setAccessTime(long value) {
6387              bitField0_ |= 0x00000004;
6388              accessTime_ = value;
6389              onChanged();
6390              return this;
6391            }
6392            /**
6393             * <code>optional uint64 accessTime = 3;</code>
6394             */
6395            public Builder clearAccessTime() {
6396              bitField0_ = (bitField0_ & ~0x00000004);
6397              accessTime_ = 0L;
6398              onChanged();
6399              return this;
6400            }
6401    
6402            // optional uint64 preferredBlockSize = 4;
6403            private long preferredBlockSize_ ;
6404            /**
6405             * <code>optional uint64 preferredBlockSize = 4;</code>
6406             */
6407            public boolean hasPreferredBlockSize() {
6408              return ((bitField0_ & 0x00000008) == 0x00000008);
6409            }
6410            /**
6411             * <code>optional uint64 preferredBlockSize = 4;</code>
6412             */
6413            public long getPreferredBlockSize() {
6414              return preferredBlockSize_;
6415            }
6416            /**
6417             * <code>optional uint64 preferredBlockSize = 4;</code>
6418             */
6419            public Builder setPreferredBlockSize(long value) {
6420              bitField0_ |= 0x00000008;
6421              preferredBlockSize_ = value;
6422              onChanged();
6423              return this;
6424            }
6425            /**
6426             * <code>optional uint64 preferredBlockSize = 4;</code>
6427             */
6428            public Builder clearPreferredBlockSize() {
6429              bitField0_ = (bitField0_ & ~0x00000008);
6430              preferredBlockSize_ = 0L;
6431              onChanged();
6432              return this;
6433            }
6434    
6435            // optional fixed64 permission = 5;
6436            private long permission_ ;
6437            /**
6438             * <code>optional fixed64 permission = 5;</code>
6439             */
6440            public boolean hasPermission() {
6441              return ((bitField0_ & 0x00000010) == 0x00000010);
6442            }
6443            /**
6444             * <code>optional fixed64 permission = 5;</code>
6445             */
6446            public long getPermission() {
6447              return permission_;
6448            }
6449            /**
6450             * <code>optional fixed64 permission = 5;</code>
6451             */
6452            public Builder setPermission(long value) {
6453              bitField0_ |= 0x00000010;
6454              permission_ = value;
6455              onChanged();
6456              return this;
6457            }
6458            /**
6459             * <code>optional fixed64 permission = 5;</code>
6460             */
6461            public Builder clearPermission() {
6462              bitField0_ = (bitField0_ & ~0x00000010);
6463              permission_ = 0L;
6464              onChanged();
6465              return this;
6466            }
6467    
        // repeated .hadoop.hdfs.BlockProto blocks = 6;
        // Backing list for the repeated blocks field. Starts as the shared
        // immutable empty list; bit 0x20 of bitField0_ records whether
        // blocks_ is currently a private, mutable copy.
        private java.util.List<org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto> blocks_ =
          java.util.Collections.emptyList();
        // Copy-on-write guard: swaps blocks_ for a private ArrayList copy the
        // first time a mutation is attempted while the shared list is held.
        private void ensureBlocksIsMutable() {
          if (!((bitField0_ & 0x00000020) == 0x00000020)) {
            blocks_ = new java.util.ArrayList<org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto>(blocks_);
            bitField0_ |= 0x00000020;
           }
        }

        // Lazily created repeated-field builder; while non-null it (not
        // blocks_) owns the field contents, and every accessor below
        // dispatches on whether it exists yet.
        private com.google.protobuf.RepeatedFieldBuilder<
            org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProtoOrBuilder> blocksBuilder_;

        /**
         * <code>repeated .hadoop.hdfs.BlockProto blocks = 6;</code>
         * Returns an unmodifiable view of the current block list.
         */
        public java.util.List<org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto> getBlocksList() {
          if (blocksBuilder_ == null) {
            return java.util.Collections.unmodifiableList(blocks_);
          } else {
            return blocksBuilder_.getMessageList();
          }
        }
        /**
         * <code>repeated .hadoop.hdfs.BlockProto blocks = 6;</code>
         */
        public int getBlocksCount() {
          if (blocksBuilder_ == null) {
            return blocks_.size();
          } else {
            return blocksBuilder_.getCount();
          }
        }
        /**
         * <code>repeated .hadoop.hdfs.BlockProto blocks = 6;</code>
         */
        public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto getBlocks(int index) {
          if (blocksBuilder_ == null) {
            return blocks_.get(index);
          } else {
            return blocksBuilder_.getMessage(index);
          }
        }
        /**
         * <code>repeated .hadoop.hdfs.BlockProto blocks = 6;</code>
         * Replaces the element at {@code index} with {@code value}.
         */
        public Builder setBlocks(
            int index, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto value) {
          if (blocksBuilder_ == null) {
            if (value == null) {
              throw new NullPointerException();
            }
            ensureBlocksIsMutable();
            blocks_.set(index, value);
            onChanged();
          } else {
            blocksBuilder_.setMessage(index, value);
          }
          return this;
        }
        /**
         * <code>repeated .hadoop.hdfs.BlockProto blocks = 6;</code>
         */
        public Builder setBlocks(
            int index, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto.Builder builderForValue) {
          if (blocksBuilder_ == null) {
            ensureBlocksIsMutable();
            blocks_.set(index, builderForValue.build());
            onChanged();
          } else {
            blocksBuilder_.setMessage(index, builderForValue.build());
          }
          return this;
        }
        /**
         * <code>repeated .hadoop.hdfs.BlockProto blocks = 6;</code>
         * Appends {@code value} to the end of the list.
         */
        public Builder addBlocks(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto value) {
          if (blocksBuilder_ == null) {
            if (value == null) {
              throw new NullPointerException();
            }
            ensureBlocksIsMutable();
            blocks_.add(value);
            onChanged();
          } else {
            blocksBuilder_.addMessage(value);
          }
          return this;
        }
        /**
         * <code>repeated .hadoop.hdfs.BlockProto blocks = 6;</code>
         * Inserts {@code value} at {@code index}, shifting later elements.
         */
        public Builder addBlocks(
            int index, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto value) {
          if (blocksBuilder_ == null) {
            if (value == null) {
              throw new NullPointerException();
            }
            ensureBlocksIsMutable();
            blocks_.add(index, value);
            onChanged();
          } else {
            blocksBuilder_.addMessage(index, value);
          }
          return this;
        }
        /**
         * <code>repeated .hadoop.hdfs.BlockProto blocks = 6;</code>
         */
        public Builder addBlocks(
            org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto.Builder builderForValue) {
          if (blocksBuilder_ == null) {
            ensureBlocksIsMutable();
            blocks_.add(builderForValue.build());
            onChanged();
          } else {
            blocksBuilder_.addMessage(builderForValue.build());
          }
          return this;
        }
        /**
         * <code>repeated .hadoop.hdfs.BlockProto blocks = 6;</code>
         */
        public Builder addBlocks(
            int index, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto.Builder builderForValue) {
          if (blocksBuilder_ == null) {
            ensureBlocksIsMutable();
            blocks_.add(index, builderForValue.build());
            onChanged();
          } else {
            blocksBuilder_.addMessage(index, builderForValue.build());
          }
          return this;
        }
        /**
         * <code>repeated .hadoop.hdfs.BlockProto blocks = 6;</code>
         * Appends all of {@code values} to the end of the list.
         */
        public Builder addAllBlocks(
            java.lang.Iterable<? extends org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto> values) {
          if (blocksBuilder_ == null) {
            ensureBlocksIsMutable();
            // GeneratedMessage.Builder.addAll null-checks each element.
            super.addAll(values, blocks_);
            onChanged();
          } else {
            blocksBuilder_.addAllMessages(values);
          }
          return this;
        }
        /**
         * <code>repeated .hadoop.hdfs.BlockProto blocks = 6;</code>
         * Removes every block and resets the field to its empty default.
         */
        public Builder clearBlocks() {
          if (blocksBuilder_ == null) {
            blocks_ = java.util.Collections.emptyList();
            bitField0_ = (bitField0_ & ~0x00000020);
            onChanged();
          } else {
            blocksBuilder_.clear();
          }
          return this;
        }
        /**
         * <code>repeated .hadoop.hdfs.BlockProto blocks = 6;</code>
         */
        public Builder removeBlocks(int index) {
          if (blocksBuilder_ == null) {
            ensureBlocksIsMutable();
            blocks_.remove(index);
            onChanged();
          } else {
            blocksBuilder_.remove(index);
          }
          return this;
        }
        /**
         * <code>repeated .hadoop.hdfs.BlockProto blocks = 6;</code>
         * Forces creation of the field builder and returns a mutable builder
         * for the element at {@code index}.
         */
        public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto.Builder getBlocksBuilder(
            int index) {
          return getBlocksFieldBuilder().getBuilder(index);
        }
        /**
         * <code>repeated .hadoop.hdfs.BlockProto blocks = 6;</code>
         */
        public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProtoOrBuilder getBlocksOrBuilder(
            int index) {
          if (blocksBuilder_ == null) {
            return blocks_.get(index);  } else {
            return blocksBuilder_.getMessageOrBuilder(index);
          }
        }
        /**
         * <code>repeated .hadoop.hdfs.BlockProto blocks = 6;</code>
         */
        public java.util.List<? extends org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProtoOrBuilder> 
             getBlocksOrBuilderList() {
          if (blocksBuilder_ != null) {
            return blocksBuilder_.getMessageOrBuilderList();
          } else {
            return java.util.Collections.unmodifiableList(blocks_);
          }
        }
        /**
         * <code>repeated .hadoop.hdfs.BlockProto blocks = 6;</code>
         * Appends a new default-valued element and returns its builder.
         */
        public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto.Builder addBlocksBuilder() {
          return getBlocksFieldBuilder().addBuilder(
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto.getDefaultInstance());
        }
        /**
         * <code>repeated .hadoop.hdfs.BlockProto blocks = 6;</code>
         */
        public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto.Builder addBlocksBuilder(
            int index) {
          return getBlocksFieldBuilder().addBuilder(
              index, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto.getDefaultInstance());
        }
        /**
         * <code>repeated .hadoop.hdfs.BlockProto blocks = 6;</code>
         */
        public java.util.List<org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto.Builder> 
             getBlocksBuilderList() {
          return getBlocksFieldBuilder().getBuilderList();
        }
        // Lazily creates the RepeatedFieldBuilder on first use; ownership of
        // the current contents transfers to it, so blocks_ is nulled out.
        private com.google.protobuf.RepeatedFieldBuilder<
            org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProtoOrBuilder> 
            getBlocksFieldBuilder() {
          if (blocksBuilder_ == null) {
            blocksBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
                org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProtoOrBuilder>(
                    blocks_,
                    ((bitField0_ & 0x00000020) == 0x00000020),
                    getParentForChildren(),
                    isClean());
            blocks_ = null;
          }
          return blocksBuilder_;
        }
6707    
        // optional .hadoop.hdfs.fsimage.INodeSection.FileUnderConstructionFeature fileUC = 7;
        // Current value of the optional fileUC field; meaningful only while
        // fileUCBuilder_ is null (once the builder exists, it owns the value).
        private org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature fileUC_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature.getDefaultInstance();
        // Lazily created single-field builder; null until getFileUCFieldBuilder()
        // is first invoked.
        private com.google.protobuf.SingleFieldBuilder<
            org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeatureOrBuilder> fileUCBuilder_;
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.FileUnderConstructionFeature fileUC = 7;</code>
         */
        public boolean hasFileUC() {
          return ((bitField0_ & 0x00000040) == 0x00000040);
        }
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.FileUnderConstructionFeature fileUC = 7;</code>
         */
        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature getFileUC() {
          if (fileUCBuilder_ == null) {
            return fileUC_;
          } else {
            return fileUCBuilder_.getMessage();
          }
        }
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.FileUnderConstructionFeature fileUC = 7;</code>
         * Replaces the field with {@code value} and marks it present.
         */
        public Builder setFileUC(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature value) {
          if (fileUCBuilder_ == null) {
            if (value == null) {
              throw new NullPointerException();
            }
            fileUC_ = value;
            onChanged();
          } else {
            fileUCBuilder_.setMessage(value);
          }
          bitField0_ |= 0x00000040;
          return this;
        }
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.FileUnderConstructionFeature fileUC = 7;</code>
         */
        public Builder setFileUC(
            org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature.Builder builderForValue) {
          if (fileUCBuilder_ == null) {
            fileUC_ = builderForValue.build();
            onChanged();
          } else {
            fileUCBuilder_.setMessage(builderForValue.build());
          }
          bitField0_ |= 0x00000040;
          return this;
        }
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.FileUnderConstructionFeature fileUC = 7;</code>
         * Field-merges {@code value} into any existing non-default value;
         * otherwise simply adopts {@code value}.
         */
        public Builder mergeFileUC(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature value) {
          if (fileUCBuilder_ == null) {
            if (((bitField0_ & 0x00000040) == 0x00000040) &&
                fileUC_ != org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature.getDefaultInstance()) {
              // Already set to a non-default value: merge into a fresh copy.
              fileUC_ =
                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature.newBuilder(fileUC_).mergeFrom(value).buildPartial();
            } else {
              fileUC_ = value;
            }
            onChanged();
          } else {
            fileUCBuilder_.mergeFrom(value);
          }
          bitField0_ |= 0x00000040;
          return this;
        }
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.FileUnderConstructionFeature fileUC = 7;</code>
         * Resets the field to its default instance and clears its presence bit.
         */
        public Builder clearFileUC() {
          if (fileUCBuilder_ == null) {
            fileUC_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature.getDefaultInstance();
            onChanged();
          } else {
            fileUCBuilder_.clear();
          }
          bitField0_ = (bitField0_ & ~0x00000040);
          return this;
        }
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.FileUnderConstructionFeature fileUC = 7;</code>
         * Marks the field present and returns a mutable nested builder for it.
         */
        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature.Builder getFileUCBuilder() {
          bitField0_ |= 0x00000040;
          onChanged();
          return getFileUCFieldBuilder().getBuilder();
        }
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.FileUnderConstructionFeature fileUC = 7;</code>
         */
        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeatureOrBuilder getFileUCOrBuilder() {
          if (fileUCBuilder_ != null) {
            return fileUCBuilder_.getMessageOrBuilder();
          } else {
            return fileUC_;
          }
        }
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.FileUnderConstructionFeature fileUC = 7;</code>
         * Lazily creates the SingleFieldBuilder on first use; ownership of the
         * current value transfers to it, so fileUC_ is nulled out.
         */
        private com.google.protobuf.SingleFieldBuilder<
            org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeatureOrBuilder> 
            getFileUCFieldBuilder() {
          if (fileUCBuilder_ == null) {
            fileUCBuilder_ = new com.google.protobuf.SingleFieldBuilder<
                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeatureOrBuilder>(
                    fileUC_,
                    getParentForChildren(),
                    isClean());
            fileUC_ = null;
          }
          return fileUCBuilder_;
        }
6824    
6825            // optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 8;
6826            private org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto acl_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto.getDefaultInstance();
6827            private com.google.protobuf.SingleFieldBuilder<
6828                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProtoOrBuilder> aclBuilder_;
6829            /**
6830             * <code>optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 8;</code>
6831             */
6832            public boolean hasAcl() {
6833              return ((bitField0_ & 0x00000080) == 0x00000080);
6834            }
6835            /**
6836             * <code>optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 8;</code>
6837             */
6838            public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto getAcl() {
6839              if (aclBuilder_ == null) {
6840                return acl_;
6841              } else {
6842                return aclBuilder_.getMessage();
6843              }
6844            }
6845            /**
6846             * <code>optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 8;</code>
6847             */
6848            public Builder setAcl(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto value) {
6849              if (aclBuilder_ == null) {
6850                if (value == null) {
6851                  throw new NullPointerException();
6852                }
6853                acl_ = value;
6854                onChanged();
6855              } else {
6856                aclBuilder_.setMessage(value);
6857              }
6858              bitField0_ |= 0x00000080;
6859              return this;
6860            }
6861            /**
6862             * <code>optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 8;</code>
6863             */
6864            public Builder setAcl(
6865                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto.Builder builderForValue) {
6866              if (aclBuilder_ == null) {
6867                acl_ = builderForValue.build();
6868                onChanged();
6869              } else {
6870                aclBuilder_.setMessage(builderForValue.build());
6871              }
6872              bitField0_ |= 0x00000080;
6873              return this;
6874            }
6875            /**
6876             * <code>optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 8;</code>
6877             */
6878            public Builder mergeAcl(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto value) {
6879              if (aclBuilder_ == null) {
6880                if (((bitField0_ & 0x00000080) == 0x00000080) &&
6881                    acl_ != org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto.getDefaultInstance()) {
6882                  acl_ =
6883                    org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto.newBuilder(acl_).mergeFrom(value).buildPartial();
6884                } else {
6885                  acl_ = value;
6886                }
6887                onChanged();
6888              } else {
6889                aclBuilder_.mergeFrom(value);
6890              }
6891              bitField0_ |= 0x00000080;
6892              return this;
6893            }
6894            /**
6895             * <code>optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 8;</code>
6896             */
6897            public Builder clearAcl() {
6898              if (aclBuilder_ == null) {
6899                acl_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto.getDefaultInstance();
6900                onChanged();
6901              } else {
6902                aclBuilder_.clear();
6903              }
6904              bitField0_ = (bitField0_ & ~0x00000080);
6905              return this;
6906            }
6907            /**
6908             * <code>optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 8;</code>
6909             */
6910            public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto.Builder getAclBuilder() {
6911              bitField0_ |= 0x00000080;
6912              onChanged();
6913              return getAclFieldBuilder().getBuilder();
6914            }
6915            /**
6916             * <code>optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 8;</code>
6917             */
6918            public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProtoOrBuilder getAclOrBuilder() {
6919              if (aclBuilder_ != null) {
6920                return aclBuilder_.getMessageOrBuilder();
6921              } else {
6922                return acl_;
6923              }
6924            }
6925            /**
6926             * <code>optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 8;</code>
6927             */
6928            private com.google.protobuf.SingleFieldBuilder<
6929                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProtoOrBuilder> 
6930                getAclFieldBuilder() {
6931              if (aclBuilder_ == null) {
6932                aclBuilder_ = new com.google.protobuf.SingleFieldBuilder<
6933                    org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProtoOrBuilder>(
6934                        acl_,
6935                        getParentForChildren(),
6936                        isClean());
6937                acl_ = null;
6938              }
6939              return aclBuilder_;
6940            }
6941    
        // optional .hadoop.hdfs.fsimage.INodeSection.XAttrFeatureProto xAttrs = 9;
        // Message-backed storage for the xAttrs field; used while xAttrsBuilder_ is null.
        private org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto xAttrs_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto.getDefaultInstance();
        // Builder-backed storage; once non-null it supersedes xAttrs_ (see getXAttrsFieldBuilder()).
        private com.google.protobuf.SingleFieldBuilder<
            org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProtoOrBuilder> xAttrsBuilder_;
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.XAttrFeatureProto xAttrs = 9;</code>
         *
         * True when the has-bit for field 9 (0x00000100) is set.
         */
        public boolean hasXAttrs() {
          return ((bitField0_ & 0x00000100) == 0x00000100);
        }
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.XAttrFeatureProto xAttrs = 9;</code>
         *
         * Returns the current xAttrs value from whichever storage is active.
         */
        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto getXAttrs() {
          if (xAttrsBuilder_ == null) {
            return xAttrs_;
          } else {
            return xAttrsBuilder_.getMessage();
          }
        }
6962            /**
6963             * <code>optional .hadoop.hdfs.fsimage.INodeSection.XAttrFeatureProto xAttrs = 9;</code>
6964             */
6965            public Builder setXAttrs(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto value) {
6966              if (xAttrsBuilder_ == null) {
6967                if (value == null) {
6968                  throw new NullPointerException();
6969                }
6970                xAttrs_ = value;
6971                onChanged();
6972              } else {
6973                xAttrsBuilder_.setMessage(value);
6974              }
6975              bitField0_ |= 0x00000100;
6976              return this;
6977            }
6978            /**
6979             * <code>optional .hadoop.hdfs.fsimage.INodeSection.XAttrFeatureProto xAttrs = 9;</code>
6980             */
6981            public Builder setXAttrs(
6982                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto.Builder builderForValue) {
6983              if (xAttrsBuilder_ == null) {
6984                xAttrs_ = builderForValue.build();
6985                onChanged();
6986              } else {
6987                xAttrsBuilder_.setMessage(builderForValue.build());
6988              }
6989              bitField0_ |= 0x00000100;
6990              return this;
6991            }
6992            /**
6993             * <code>optional .hadoop.hdfs.fsimage.INodeSection.XAttrFeatureProto xAttrs = 9;</code>
6994             */
6995            public Builder mergeXAttrs(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto value) {
6996              if (xAttrsBuilder_ == null) {
6997                if (((bitField0_ & 0x00000100) == 0x00000100) &&
6998                    xAttrs_ != org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto.getDefaultInstance()) {
6999                  xAttrs_ =
7000                    org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto.newBuilder(xAttrs_).mergeFrom(value).buildPartial();
7001                } else {
7002                  xAttrs_ = value;
7003                }
7004                onChanged();
7005              } else {
7006                xAttrsBuilder_.mergeFrom(value);
7007              }
7008              bitField0_ |= 0x00000100;
7009              return this;
7010            }
7011            /**
7012             * <code>optional .hadoop.hdfs.fsimage.INodeSection.XAttrFeatureProto xAttrs = 9;</code>
7013             */
7014            public Builder clearXAttrs() {
7015              if (xAttrsBuilder_ == null) {
7016                xAttrs_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto.getDefaultInstance();
7017                onChanged();
7018              } else {
7019                xAttrsBuilder_.clear();
7020              }
7021              bitField0_ = (bitField0_ & ~0x00000100);
7022              return this;
7023            }
7024            /**
7025             * <code>optional .hadoop.hdfs.fsimage.INodeSection.XAttrFeatureProto xAttrs = 9;</code>
7026             */
7027            public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto.Builder getXAttrsBuilder() {
7028              bitField0_ |= 0x00000100;
7029              onChanged();
7030              return getXAttrsFieldBuilder().getBuilder();
7031            }
7032            /**
7033             * <code>optional .hadoop.hdfs.fsimage.INodeSection.XAttrFeatureProto xAttrs = 9;</code>
7034             */
7035            public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProtoOrBuilder getXAttrsOrBuilder() {
7036              if (xAttrsBuilder_ != null) {
7037                return xAttrsBuilder_.getMessageOrBuilder();
7038              } else {
7039                return xAttrs_;
7040              }
7041            }
7042            /**
7043             * <code>optional .hadoop.hdfs.fsimage.INodeSection.XAttrFeatureProto xAttrs = 9;</code>
7044             */
7045            private com.google.protobuf.SingleFieldBuilder<
7046                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProtoOrBuilder> 
7047                getXAttrsFieldBuilder() {
7048              if (xAttrsBuilder_ == null) {
7049                xAttrsBuilder_ = new com.google.protobuf.SingleFieldBuilder<
7050                    org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProtoOrBuilder>(
7051                        xAttrs_,
7052                        getParentForChildren(),
7053                        isClean());
7054                xAttrs_ = null;
7055              }
7056              return xAttrsBuilder_;
7057            }
7058    
        // optional uint32 storagePolicyID = 10;
        // Scalar field: no nested-builder plumbing needed, just the value plus
        // its has-bit (0x00000200) in bitField0_.
        private int storagePolicyID_ ;
        /**
         * <code>optional uint32 storagePolicyID = 10;</code>
         *
         * True when the has-bit for field 10 is set.
         */
        public boolean hasStoragePolicyID() {
          return ((bitField0_ & 0x00000200) == 0x00000200);
        }
        /**
         * <code>optional uint32 storagePolicyID = 10;</code>
         */
        public int getStoragePolicyID() {
          return storagePolicyID_;
        }
        /**
         * <code>optional uint32 storagePolicyID = 10;</code>
         *
         * Sets the value and its has-bit.
         */
        public Builder setStoragePolicyID(int value) {
          bitField0_ |= 0x00000200;
          storagePolicyID_ = value;
          onChanged();
          return this;
        }
        /**
         * <code>optional uint32 storagePolicyID = 10;</code>
         *
         * Clears the has-bit and resets the value to the uint32 default (0).
         */
        public Builder clearStoragePolicyID() {
          bitField0_ = (bitField0_ & ~0x00000200);
          storagePolicyID_ = 0;
          onChanged();
          return this;
        }
7091    
7092            // @@protoc_insertion_point(builder_scope:hadoop.hdfs.fsimage.INodeSection.INodeFile)
7093          }
7094    
      // Eagerly build the shared default instance via the no-parse constructor,
      // then initialize all fields to their proto defaults.
      static {
        defaultInstance = new INodeFile(true);
        defaultInstance.initFields();
      }
7099    
7100          // @@protoc_insertion_point(class_scope:hadoop.hdfs.fsimage.INodeSection.INodeFile)
7101        }
7102    
    /**
     * Accessor contract shared by {@code INodeDirectory} and its Builder:
     * a hasX()/getX() pair per field, plus OrBuilder views for the
     * message-typed fields (acl, xAttrs).
     */
    public interface INodeDirectoryOrBuilder
        extends com.google.protobuf.MessageOrBuilder {

      // optional uint64 modificationTime = 1;
      /**
       * <code>optional uint64 modificationTime = 1;</code>
       */
      boolean hasModificationTime();
      /**
       * <code>optional uint64 modificationTime = 1;</code>
       */
      long getModificationTime();

      // optional uint64 nsQuota = 2;
      /**
       * <code>optional uint64 nsQuota = 2;</code>
       *
       * <pre>
       * namespace quota
       * </pre>
       */
      boolean hasNsQuota();
      /**
       * <code>optional uint64 nsQuota = 2;</code>
       *
       * <pre>
       * namespace quota
       * </pre>
       */
      long getNsQuota();

      // optional uint64 dsQuota = 3;
      /**
       * <code>optional uint64 dsQuota = 3;</code>
       *
       * <pre>
       * diskspace quota
       * </pre>
       */
      boolean hasDsQuota();
      /**
       * <code>optional uint64 dsQuota = 3;</code>
       *
       * <pre>
       * diskspace quota
       * </pre>
       */
      long getDsQuota();

      // optional fixed64 permission = 4;
      /**
       * <code>optional fixed64 permission = 4;</code>
       */
      boolean hasPermission();
      /**
       * <code>optional fixed64 permission = 4;</code>
       */
      long getPermission();

      // optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 5;
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 5;</code>
       */
      boolean hasAcl();
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 5;</code>
       */
      org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto getAcl();
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 5;</code>
       */
      org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProtoOrBuilder getAclOrBuilder();

      // optional .hadoop.hdfs.fsimage.INodeSection.XAttrFeatureProto xAttrs = 6;
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.XAttrFeatureProto xAttrs = 6;</code>
       */
      boolean hasXAttrs();
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.XAttrFeatureProto xAttrs = 6;</code>
       */
      org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto getXAttrs();
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.XAttrFeatureProto xAttrs = 6;</code>
       */
      org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProtoOrBuilder getXAttrsOrBuilder();
    }
7190        /**
7191         * Protobuf type {@code hadoop.hdfs.fsimage.INodeSection.INodeDirectory}
7192         */
7193        public static final class INodeDirectory extends
7194            com.google.protobuf.GeneratedMessage
7195            implements INodeDirectoryOrBuilder {
      // Use INodeDirectory.newBuilder() to construct.
      // Builder-path constructor: copies the builder's unknown fields into the
      // immutable message.
      private INodeDirectory(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
        super(builder);
        this.unknownFields = builder.getUnknownFields();
      }
      // No-init constructor used only for the singleton default instance.
      private INodeDirectory(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

      // Singleton default instance, assigned in the class's static initializer.
      private static final INodeDirectory defaultInstance;
      public static INodeDirectory getDefaultInstance() {
        return defaultInstance;
      }

      public INodeDirectory getDefaultInstanceForType() {
        return defaultInstance;
      }

      // Fields that arrived on the wire but are not part of this schema.
      private final com.google.protobuf.UnknownFieldSet unknownFields;
      @java.lang.Override
      public final com.google.protobuf.UnknownFieldSet
          getUnknownFields() {
        return this.unknownFields;
      }
      // Stream-parsing constructor: decodes one INodeDirectory message from the
      // input, dispatching on each wire tag (tag = fieldNumber << 3 | wireType).
      // Unrecognized tags are preserved in unknownFields. Note the `default:`
      // arm preceding the numbered cases is legal Java and does not affect
      // dispatch order.
      private INodeDirectory(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        initFields();
        int mutable_bitField0_ = 0;
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
            com.google.protobuf.UnknownFieldSet.newBuilder();
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                // Tag 0 marks end of stream / end of message.
                done = true;
                break;
              default: {
                if (!parseUnknownField(input, unknownFields,
                                       extensionRegistry, tag)) {
                  done = true;
                }
                break;
              }
              case 8: {
                // Field 1 (modificationTime), varint.
                bitField0_ |= 0x00000001;
                modificationTime_ = input.readUInt64();
                break;
              }
              case 16: {
                // Field 2 (nsQuota), varint.
                bitField0_ |= 0x00000002;
                nsQuota_ = input.readUInt64();
                break;
              }
              case 24: {
                // Field 3 (dsQuota), varint.
                bitField0_ |= 0x00000004;
                dsQuota_ = input.readUInt64();
                break;
              }
              case 33: {
                // Field 4 (permission), fixed64.
                bitField0_ |= 0x00000008;
                permission_ = input.readFixed64();
                break;
              }
              case 42: {
                // Field 5 (acl), length-delimited message. If acl was already
                // seen, merge the new occurrence into the previous one.
                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto.Builder subBuilder = null;
                if (((bitField0_ & 0x00000010) == 0x00000010)) {
                  subBuilder = acl_.toBuilder();
                }
                acl_ = input.readMessage(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto.PARSER, extensionRegistry);
                if (subBuilder != null) {
                  subBuilder.mergeFrom(acl_);
                  acl_ = subBuilder.buildPartial();
                }
                bitField0_ |= 0x00000010;
                break;
              }
              case 50: {
                // Field 6 (xAttrs), length-delimited message; same merge-on-repeat
                // handling as acl.
                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto.Builder subBuilder = null;
                if (((bitField0_ & 0x00000020) == 0x00000020)) {
                  subBuilder = xAttrs_.toBuilder();
                }
                xAttrs_ = input.readMessage(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto.PARSER, extensionRegistry);
                if (subBuilder != null) {
                  subBuilder.mergeFrom(xAttrs_);
                  xAttrs_ = subBuilder.buildPartial();
                }
                bitField0_ |= 0x00000020;
                break;
              }
            }
          }
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(this);
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(
              e.getMessage()).setUnfinishedMessage(this);
        } finally {
          // Always freeze unknown fields, even on error, so the partially
          // parsed message attached to the exception is consistent.
          this.unknownFields = unknownFields.build();
          makeExtensionsImmutable();
        }
      }
      // Descriptor and reflection plumbing; the referenced statics are defined
      // at the bottom of the outer FsImageProto class.
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_INodeDirectory_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_INodeDirectory_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory.Builder.class);
      }
7310    
      // Stateless parser delegating to the stream-parsing constructor above;
      // shared by all the static parseFrom() overloads.
      public static com.google.protobuf.Parser<INodeDirectory> PARSER =
          new com.google.protobuf.AbstractParser<INodeDirectory>() {
        public INodeDirectory parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          return new INodeDirectory(input, extensionRegistry);
        }
      };

      @java.lang.Override
      public com.google.protobuf.Parser<INodeDirectory> getParserForType() {
        return PARSER;
      }
7325    
      // Packed has-bits for the optional fields (one bit per field, in
      // declaration order: 0x01 = modificationTime ... 0x20 = xAttrs).
      private int bitField0_;
      // optional uint64 modificationTime = 1;
      public static final int MODIFICATIONTIME_FIELD_NUMBER = 1;
      private long modificationTime_;
      /**
       * <code>optional uint64 modificationTime = 1;</code>
       */
      public boolean hasModificationTime() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>optional uint64 modificationTime = 1;</code>
       */
      public long getModificationTime() {
        return modificationTime_;
      }
7342    
      // optional uint64 nsQuota = 2;
      public static final int NSQUOTA_FIELD_NUMBER = 2;
      private long nsQuota_;
      /**
       * <code>optional uint64 nsQuota = 2;</code>
       *
       * <pre>
       * namespace quota
       * </pre>
       */
      public boolean hasNsQuota() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>optional uint64 nsQuota = 2;</code>
       *
       * <pre>
       * namespace quota
       * </pre>
       */
      public long getNsQuota() {
        return nsQuota_;
      }
7366    
      // optional uint64 dsQuota = 3;
      public static final int DSQUOTA_FIELD_NUMBER = 3;
      private long dsQuota_;
      /**
       * <code>optional uint64 dsQuota = 3;</code>
       *
       * <pre>
       * diskspace quota
       * </pre>
       */
      public boolean hasDsQuota() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      /**
       * <code>optional uint64 dsQuota = 3;</code>
       *
       * <pre>
       * diskspace quota
       * </pre>
       */
      public long getDsQuota() {
        return dsQuota_;
      }
7390    
      // optional fixed64 permission = 4;
      // NOTE(review): looks like the 64 bits pack owner/group/mode as elsewhere
      // in the fsimage format — confirm against the .proto comments.
      public static final int PERMISSION_FIELD_NUMBER = 4;
      private long permission_;
      /**
       * <code>optional fixed64 permission = 4;</code>
       */
      public boolean hasPermission() {
        return ((bitField0_ & 0x00000008) == 0x00000008);
      }
      /**
       * <code>optional fixed64 permission = 4;</code>
       */
      public long getPermission() {
        return permission_;
      }
7406    
      // optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 5;
      public static final int ACL_FIELD_NUMBER = 5;
      private org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto acl_;
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 5;</code>
       */
      public boolean hasAcl() {
        return ((bitField0_ & 0x00000010) == 0x00000010);
      }
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 5;</code>
       *
       * Never null: initFields() seeds this with the default instance when unset.
       */
      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto getAcl() {
        return acl_;
      }
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 5;</code>
       */
      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProtoOrBuilder getAclOrBuilder() {
        return acl_;
      }
7428    
      // optional .hadoop.hdfs.fsimage.INodeSection.XAttrFeatureProto xAttrs = 6;
      public static final int XATTRS_FIELD_NUMBER = 6;
      private org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto xAttrs_;
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.XAttrFeatureProto xAttrs = 6;</code>
       */
      public boolean hasXAttrs() {
        return ((bitField0_ & 0x00000020) == 0x00000020);
      }
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.XAttrFeatureProto xAttrs = 6;</code>
       *
       * Never null: initFields() seeds this with the default instance when unset.
       */
      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto getXAttrs() {
        return xAttrs_;
      }
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.XAttrFeatureProto xAttrs = 6;</code>
       */
      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProtoOrBuilder getXAttrsOrBuilder() {
        return xAttrs_;
      }
7450    
      // Resets every field to its proto default; called before parsing and when
      // building the singleton default instance.
      private void initFields() {
        modificationTime_ = 0L;
        nsQuota_ = 0L;
        dsQuota_ = 0L;
        permission_ = 0L;
        acl_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto.getDefaultInstance();
        xAttrs_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto.getDefaultInstance();
      }
      // Memoized initialization check: -1 = not yet computed, 0 = false, 1 = true.
      private byte memoizedIsInitialized = -1;
      public final boolean isInitialized() {
        byte isInitialized = memoizedIsInitialized;
        if (isInitialized != -1) return isInitialized == 1;

        // Only xAttrs can carry required sub-fields; all scalar fields here are
        // optional and need no check.
        if (hasXAttrs()) {
          if (!getXAttrs().isInitialized()) {
            memoizedIsInitialized = 0;
            return false;
          }
        }
        memoizedIsInitialized = 1;
        return true;
      }
7473    
      // Serializes only the fields whose has-bits are set, in field-number order.
      public void writeTo(com.google.protobuf.CodedOutputStream output)
                          throws java.io.IOException {
        // Ensure serialized sizes are computed (and cached) before writing.
        getSerializedSize();
        if (((bitField0_ & 0x00000001) == 0x00000001)) {
          output.writeUInt64(1, modificationTime_);
        }
        if (((bitField0_ & 0x00000002) == 0x00000002)) {
          output.writeUInt64(2, nsQuota_);
        }
        if (((bitField0_ & 0x00000004) == 0x00000004)) {
          output.writeUInt64(3, dsQuota_);
        }
        if (((bitField0_ & 0x00000008) == 0x00000008)) {
          output.writeFixed64(4, permission_);
        }
        if (((bitField0_ & 0x00000010) == 0x00000010)) {
          output.writeMessage(5, acl_);
        }
        if (((bitField0_ & 0x00000020) == 0x00000020)) {
          output.writeMessage(6, xAttrs_);
        }
        getUnknownFields().writeTo(output);
      }
7497    
      // Cached wire size; -1 means not yet computed. Safe to memoize because the
      // message is immutable once constructed.
      private int memoizedSerializedSize = -1;
      public int getSerializedSize() {
        int size = memoizedSerializedSize;
        if (size != -1) return size;

        size = 0;
        if (((bitField0_ & 0x00000001) == 0x00000001)) {
          size += com.google.protobuf.CodedOutputStream
            .computeUInt64Size(1, modificationTime_);
        }
        if (((bitField0_ & 0x00000002) == 0x00000002)) {
          size += com.google.protobuf.CodedOutputStream
            .computeUInt64Size(2, nsQuota_);
        }
        if (((bitField0_ & 0x00000004) == 0x00000004)) {
          size += com.google.protobuf.CodedOutputStream
            .computeUInt64Size(3, dsQuota_);
        }
        if (((bitField0_ & 0x00000008) == 0x00000008)) {
          size += com.google.protobuf.CodedOutputStream
            .computeFixed64Size(4, permission_);
        }
        if (((bitField0_ & 0x00000010) == 0x00000010)) {
          size += com.google.protobuf.CodedOutputStream
            .computeMessageSize(5, acl_);
        }
        if (((bitField0_ & 0x00000020) == 0x00000020)) {
          size += com.google.protobuf.CodedOutputStream
            .computeMessageSize(6, xAttrs_);
        }
        size += getUnknownFields().getSerializedSize();
        memoizedSerializedSize = size;
        return size;
      }
7532    
      private static final long serialVersionUID = 0L;
      // Java-serialization hook; delegates to GeneratedMessage's replacement object.
      @java.lang.Override
      protected java.lang.Object writeReplace()
          throws java.io.ObjectStreamException {
        return super.writeReplace();
      }
7539    
      // Static parse entry points for every supported input shape; all delegate
      // to the shared PARSER. The *Delimited variants read a varint length prefix
      // before the message.
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory parseFrom(
          com.google.protobuf.ByteString data)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory parseFrom(
          com.google.protobuf.ByteString data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory parseFrom(byte[] data)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory parseFrom(
          byte[] data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory parseFrom(java.io.InputStream input)
          throws java.io.IOException {
        return PARSER.parseFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory parseFrom(
          java.io.InputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseFrom(input, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory parseDelimitedFrom(java.io.InputStream input)
          throws java.io.IOException {
        return PARSER.parseDelimitedFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory parseDelimitedFrom(
          java.io.InputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseDelimitedFrom(input, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory parseFrom(
          com.google.protobuf.CodedInputStream input)
          throws java.io.IOException {
        return PARSER.parseFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory parseFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseFrom(input, extensionRegistry);
      }
7592    
      // Builder factory methods: newBuilder() creates an empty Builder,
      // newBuilder(prototype) seeds a new Builder with the prototype's set
      // fields via mergeFrom, and toBuilder() does the same for this instance.
      public static Builder newBuilder() { return Builder.create(); }
      public Builder newBuilderForType() { return newBuilder(); }
      public static Builder newBuilder(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory prototype) {
        return newBuilder().mergeFrom(prototype);
      }
      public Builder toBuilder() { return newBuilder(this); }
7599    
      // Runtime hook: creates a Builder attached to the given builder parent,
      // so the parent is notified of changes when this message is edited as a
      // nested field of another builder.
      @java.lang.Override
      protected Builder newBuilderForType(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        Builder builder = new Builder(parent);
        return builder;
      }
7606          /**
7607           * Protobuf type {@code hadoop.hdfs.fsimage.INodeSection.INodeDirectory}
7608           */
7609          public static final class Builder extends
7610              com.google.protobuf.GeneratedMessage.Builder<Builder>
7611             implements org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectoryOrBuilder {
7612            public static final com.google.protobuf.Descriptors.Descriptor
7613                getDescriptor() {
7614              return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_INodeDirectory_descriptor;
7615            }
7616    
7617            protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
7618                internalGetFieldAccessorTable() {
7619              return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_INodeDirectory_fieldAccessorTable
7620                  .ensureFieldAccessorsInitialized(
7621                      org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory.Builder.class);
7622            }
7623    
7624            // Construct using org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory.newBuilder()
7625            private Builder() {
7626              maybeForceBuilderInitialization();
7627            }
7628    
7629            private Builder(
7630                com.google.protobuf.GeneratedMessage.BuilderParent parent) {
7631              super(parent);
7632              maybeForceBuilderInitialization();
7633            }
7634            private void maybeForceBuilderInitialization() {
7635              if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
7636                getAclFieldBuilder();
7637                getXAttrsFieldBuilder();
7638              }
7639            }
7640            private static Builder create() {
7641              return new Builder();
7642            }
7643    
7644            public Builder clear() {
7645              super.clear();
7646              modificationTime_ = 0L;
7647              bitField0_ = (bitField0_ & ~0x00000001);
7648              nsQuota_ = 0L;
7649              bitField0_ = (bitField0_ & ~0x00000002);
7650              dsQuota_ = 0L;
7651              bitField0_ = (bitField0_ & ~0x00000004);
7652              permission_ = 0L;
7653              bitField0_ = (bitField0_ & ~0x00000008);
7654              if (aclBuilder_ == null) {
7655                acl_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto.getDefaultInstance();
7656              } else {
7657                aclBuilder_.clear();
7658              }
7659              bitField0_ = (bitField0_ & ~0x00000010);
7660              if (xAttrsBuilder_ == null) {
7661                xAttrs_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto.getDefaultInstance();
7662              } else {
7663                xAttrsBuilder_.clear();
7664              }
7665              bitField0_ = (bitField0_ & ~0x00000020);
7666              return this;
7667            }
7668    
7669            public Builder clone() {
7670              return create().mergeFrom(buildPartial());
7671            }
7672    
7673            public com.google.protobuf.Descriptors.Descriptor
7674                getDescriptorForType() {
7675              return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_INodeDirectory_descriptor;
7676            }
7677    
7678            public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory getDefaultInstanceForType() {
7679              return org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory.getDefaultInstance();
7680            }
7681    
7682            public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory build() {
7683              org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory result = buildPartial();
7684              if (!result.isInitialized()) {
7685                throw newUninitializedMessageException(result);
7686              }
7687              return result;
7688            }
7689    
7690            public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory buildPartial() {
7691              org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory result = new org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory(this);
7692              int from_bitField0_ = bitField0_;
7693              int to_bitField0_ = 0;
7694              if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
7695                to_bitField0_ |= 0x00000001;
7696              }
7697              result.modificationTime_ = modificationTime_;
7698              if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
7699                to_bitField0_ |= 0x00000002;
7700              }
7701              result.nsQuota_ = nsQuota_;
7702              if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
7703                to_bitField0_ |= 0x00000004;
7704              }
7705              result.dsQuota_ = dsQuota_;
7706              if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
7707                to_bitField0_ |= 0x00000008;
7708              }
7709              result.permission_ = permission_;
7710              if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
7711                to_bitField0_ |= 0x00000010;
7712              }
7713              if (aclBuilder_ == null) {
7714                result.acl_ = acl_;
7715              } else {
7716                result.acl_ = aclBuilder_.build();
7717              }
7718              if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
7719                to_bitField0_ |= 0x00000020;
7720              }
7721              if (xAttrsBuilder_ == null) {
7722                result.xAttrs_ = xAttrs_;
7723              } else {
7724                result.xAttrs_ = xAttrsBuilder_.build();
7725              }
7726              result.bitField0_ = to_bitField0_;
7727              onBuilt();
7728              return result;
7729            }
7730    
7731            public Builder mergeFrom(com.google.protobuf.Message other) {
7732              if (other instanceof org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory) {
7733                return mergeFrom((org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory)other);
7734              } else {
7735                super.mergeFrom(other);
7736                return this;
7737              }
7738            }
7739    
7740            public Builder mergeFrom(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory other) {
7741              if (other == org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory.getDefaultInstance()) return this;
7742              if (other.hasModificationTime()) {
7743                setModificationTime(other.getModificationTime());
7744              }
7745              if (other.hasNsQuota()) {
7746                setNsQuota(other.getNsQuota());
7747              }
7748              if (other.hasDsQuota()) {
7749                setDsQuota(other.getDsQuota());
7750              }
7751              if (other.hasPermission()) {
7752                setPermission(other.getPermission());
7753              }
7754              if (other.hasAcl()) {
7755                mergeAcl(other.getAcl());
7756              }
7757              if (other.hasXAttrs()) {
7758                mergeXAttrs(other.getXAttrs());
7759              }
7760              this.mergeUnknownFields(other.getUnknownFields());
7761              return this;
7762            }
7763    
7764            public final boolean isInitialized() {
7765              if (hasXAttrs()) {
7766                if (!getXAttrs().isInitialized()) {
7767                  
7768                  return false;
7769                }
7770              }
7771              return true;
7772            }
7773    
7774            public Builder mergeFrom(
7775                com.google.protobuf.CodedInputStream input,
7776                com.google.protobuf.ExtensionRegistryLite extensionRegistry)
7777                throws java.io.IOException {
7778              org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory parsedMessage = null;
7779              try {
7780                parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
7781              } catch (com.google.protobuf.InvalidProtocolBufferException e) {
7782                parsedMessage = (org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory) e.getUnfinishedMessage();
7783                throw e;
7784              } finally {
7785                if (parsedMessage != null) {
7786                  mergeFrom(parsedMessage);
7787                }
7788              }
7789              return this;
7790            }
7791            private int bitField0_;
7792    
7793            // optional uint64 modificationTime = 1;
7794            private long modificationTime_ ;
7795            /**
7796             * <code>optional uint64 modificationTime = 1;</code>
7797             */
7798            public boolean hasModificationTime() {
7799              return ((bitField0_ & 0x00000001) == 0x00000001);
7800            }
7801            /**
7802             * <code>optional uint64 modificationTime = 1;</code>
7803             */
7804            public long getModificationTime() {
7805              return modificationTime_;
7806            }
7807            /**
7808             * <code>optional uint64 modificationTime = 1;</code>
7809             */
7810            public Builder setModificationTime(long value) {
7811              bitField0_ |= 0x00000001;
7812              modificationTime_ = value;
7813              onChanged();
7814              return this;
7815            }
7816            /**
7817             * <code>optional uint64 modificationTime = 1;</code>
7818             */
7819            public Builder clearModificationTime() {
7820              bitField0_ = (bitField0_ & ~0x00000001);
7821              modificationTime_ = 0L;
7822              onChanged();
7823              return this;
7824            }
7825    
7826            // optional uint64 nsQuota = 2;
7827            private long nsQuota_ ;
7828            /**
7829             * <code>optional uint64 nsQuota = 2;</code>
7830             *
7831             * <pre>
7832             * namespace quota
7833             * </pre>
7834             */
7835            public boolean hasNsQuota() {
7836              return ((bitField0_ & 0x00000002) == 0x00000002);
7837            }
7838            /**
7839             * <code>optional uint64 nsQuota = 2;</code>
7840             *
7841             * <pre>
7842             * namespace quota
7843             * </pre>
7844             */
7845            public long getNsQuota() {
7846              return nsQuota_;
7847            }
7848            /**
7849             * <code>optional uint64 nsQuota = 2;</code>
7850             *
7851             * <pre>
7852             * namespace quota
7853             * </pre>
7854             */
7855            public Builder setNsQuota(long value) {
7856              bitField0_ |= 0x00000002;
7857              nsQuota_ = value;
7858              onChanged();
7859              return this;
7860            }
7861            /**
7862             * <code>optional uint64 nsQuota = 2;</code>
7863             *
7864             * <pre>
7865             * namespace quota
7866             * </pre>
7867             */
7868            public Builder clearNsQuota() {
7869              bitField0_ = (bitField0_ & ~0x00000002);
7870              nsQuota_ = 0L;
7871              onChanged();
7872              return this;
7873            }
7874    
7875            // optional uint64 dsQuota = 3;
7876            private long dsQuota_ ;
7877            /**
7878             * <code>optional uint64 dsQuota = 3;</code>
7879             *
7880             * <pre>
7881             * diskspace quota
7882             * </pre>
7883             */
7884            public boolean hasDsQuota() {
7885              return ((bitField0_ & 0x00000004) == 0x00000004);
7886            }
7887            /**
7888             * <code>optional uint64 dsQuota = 3;</code>
7889             *
7890             * <pre>
7891             * diskspace quota
7892             * </pre>
7893             */
7894            public long getDsQuota() {
7895              return dsQuota_;
7896            }
7897            /**
7898             * <code>optional uint64 dsQuota = 3;</code>
7899             *
7900             * <pre>
7901             * diskspace quota
7902             * </pre>
7903             */
7904            public Builder setDsQuota(long value) {
7905              bitField0_ |= 0x00000004;
7906              dsQuota_ = value;
7907              onChanged();
7908              return this;
7909            }
7910            /**
7911             * <code>optional uint64 dsQuota = 3;</code>
7912             *
7913             * <pre>
7914             * diskspace quota
7915             * </pre>
7916             */
7917            public Builder clearDsQuota() {
7918              bitField0_ = (bitField0_ & ~0x00000004);
7919              dsQuota_ = 0L;
7920              onChanged();
7921              return this;
7922            }
7923    
7924            // optional fixed64 permission = 4;
7925            private long permission_ ;
7926            /**
7927             * <code>optional fixed64 permission = 4;</code>
7928             */
7929            public boolean hasPermission() {
7930              return ((bitField0_ & 0x00000008) == 0x00000008);
7931            }
7932            /**
7933             * <code>optional fixed64 permission = 4;</code>
7934             */
7935            public long getPermission() {
7936              return permission_;
7937            }
7938            /**
7939             * <code>optional fixed64 permission = 4;</code>
7940             */
7941            public Builder setPermission(long value) {
7942              bitField0_ |= 0x00000008;
7943              permission_ = value;
7944              onChanged();
7945              return this;
7946            }
7947            /**
7948             * <code>optional fixed64 permission = 4;</code>
7949             */
7950            public Builder clearPermission() {
7951              bitField0_ = (bitField0_ & ~0x00000008);
7952              permission_ = 0L;
7953              onChanged();
7954              return this;
7955            }
7956    
7957            // optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 5;
7958            private org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto acl_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto.getDefaultInstance();
7959            private com.google.protobuf.SingleFieldBuilder<
7960                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProtoOrBuilder> aclBuilder_;
7961            /**
7962             * <code>optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 5;</code>
7963             */
7964            public boolean hasAcl() {
7965              return ((bitField0_ & 0x00000010) == 0x00000010);
7966            }
7967            /**
7968             * <code>optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 5;</code>
7969             */
7970            public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto getAcl() {
7971              if (aclBuilder_ == null) {
7972                return acl_;
7973              } else {
7974                return aclBuilder_.getMessage();
7975              }
7976            }
7977            /**
7978             * <code>optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 5;</code>
7979             */
7980            public Builder setAcl(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto value) {
7981              if (aclBuilder_ == null) {
7982                if (value == null) {
7983                  throw new NullPointerException();
7984                }
7985                acl_ = value;
7986                onChanged();
7987              } else {
7988                aclBuilder_.setMessage(value);
7989              }
7990              bitField0_ |= 0x00000010;
7991              return this;
7992            }
7993            /**
7994             * <code>optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 5;</code>
7995             */
7996            public Builder setAcl(
7997                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto.Builder builderForValue) {
7998              if (aclBuilder_ == null) {
7999                acl_ = builderForValue.build();
8000                onChanged();
8001              } else {
8002                aclBuilder_.setMessage(builderForValue.build());
8003              }
8004              bitField0_ |= 0x00000010;
8005              return this;
8006            }
8007            /**
8008             * <code>optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 5;</code>
8009             */
8010            public Builder mergeAcl(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto value) {
8011              if (aclBuilder_ == null) {
8012                if (((bitField0_ & 0x00000010) == 0x00000010) &&
8013                    acl_ != org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto.getDefaultInstance()) {
8014                  acl_ =
8015                    org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto.newBuilder(acl_).mergeFrom(value).buildPartial();
8016                } else {
8017                  acl_ = value;
8018                }
8019                onChanged();
8020              } else {
8021                aclBuilder_.mergeFrom(value);
8022              }
8023              bitField0_ |= 0x00000010;
8024              return this;
8025            }
8026            /**
8027             * <code>optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 5;</code>
8028             */
8029            public Builder clearAcl() {
8030              if (aclBuilder_ == null) {
8031                acl_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto.getDefaultInstance();
8032                onChanged();
8033              } else {
8034                aclBuilder_.clear();
8035              }
8036              bitField0_ = (bitField0_ & ~0x00000010);
8037              return this;
8038            }
8039            /**
8040             * <code>optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 5;</code>
8041             */
8042            public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto.Builder getAclBuilder() {
8043              bitField0_ |= 0x00000010;
8044              onChanged();
8045              return getAclFieldBuilder().getBuilder();
8046            }
8047            /**
8048             * <code>optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 5;</code>
8049             */
8050            public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProtoOrBuilder getAclOrBuilder() {
8051              if (aclBuilder_ != null) {
8052                return aclBuilder_.getMessageOrBuilder();
8053              } else {
8054                return acl_;
8055              }
8056            }
8057            /**
8058             * <code>optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 5;</code>
8059             */
8060            private com.google.protobuf.SingleFieldBuilder<
8061                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProtoOrBuilder> 
8062                getAclFieldBuilder() {
8063              if (aclBuilder_ == null) {
8064                aclBuilder_ = new com.google.protobuf.SingleFieldBuilder<
8065                    org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProtoOrBuilder>(
8066                        acl_,
8067                        getParentForChildren(),
8068                        isClean());
8069                acl_ = null;
8070              }
8071              return aclBuilder_;
8072            }
8073    
8074            // optional .hadoop.hdfs.fsimage.INodeSection.XAttrFeatureProto xAttrs = 6;
8075            private org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto xAttrs_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto.getDefaultInstance();
8076            private com.google.protobuf.SingleFieldBuilder<
8077                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProtoOrBuilder> xAttrsBuilder_;
8078            /**
8079             * <code>optional .hadoop.hdfs.fsimage.INodeSection.XAttrFeatureProto xAttrs = 6;</code>
8080             */
8081            public boolean hasXAttrs() {
8082              return ((bitField0_ & 0x00000020) == 0x00000020);
8083            }
8084            /**
8085             * <code>optional .hadoop.hdfs.fsimage.INodeSection.XAttrFeatureProto xAttrs = 6;</code>
8086             */
8087            public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto getXAttrs() {
8088              if (xAttrsBuilder_ == null) {
8089                return xAttrs_;
8090              } else {
8091                return xAttrsBuilder_.getMessage();
8092              }
8093            }
8094            /**
8095             * <code>optional .hadoop.hdfs.fsimage.INodeSection.XAttrFeatureProto xAttrs = 6;</code>
8096             */
8097            public Builder setXAttrs(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto value) {
8098              if (xAttrsBuilder_ == null) {
8099                if (value == null) {
8100                  throw new NullPointerException();
8101                }
8102                xAttrs_ = value;
8103                onChanged();
8104              } else {
8105                xAttrsBuilder_.setMessage(value);
8106              }
8107              bitField0_ |= 0x00000020;
8108              return this;
8109            }
8110            /**
8111             * <code>optional .hadoop.hdfs.fsimage.INodeSection.XAttrFeatureProto xAttrs = 6;</code>
8112             */
8113            public Builder setXAttrs(
8114                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto.Builder builderForValue) {
8115              if (xAttrsBuilder_ == null) {
8116                xAttrs_ = builderForValue.build();
8117                onChanged();
8118              } else {
8119                xAttrsBuilder_.setMessage(builderForValue.build());
8120              }
8121              bitField0_ |= 0x00000020;
8122              return this;
8123            }
8124            /**
8125             * <code>optional .hadoop.hdfs.fsimage.INodeSection.XAttrFeatureProto xAttrs = 6;</code>
8126             */
8127            public Builder mergeXAttrs(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto value) {
8128              if (xAttrsBuilder_ == null) {
8129                if (((bitField0_ & 0x00000020) == 0x00000020) &&
8130                    xAttrs_ != org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto.getDefaultInstance()) {
8131                  xAttrs_ =
8132                    org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto.newBuilder(xAttrs_).mergeFrom(value).buildPartial();
8133                } else {
8134                  xAttrs_ = value;
8135                }
8136                onChanged();
8137              } else {
8138                xAttrsBuilder_.mergeFrom(value);
8139              }
8140              bitField0_ |= 0x00000020;
8141              return this;
8142            }
8143            /**
8144             * <code>optional .hadoop.hdfs.fsimage.INodeSection.XAttrFeatureProto xAttrs = 6;</code>
8145             */
8146            public Builder clearXAttrs() {
8147              if (xAttrsBuilder_ == null) {
8148                xAttrs_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto.getDefaultInstance();
8149                onChanged();
8150              } else {
8151                xAttrsBuilder_.clear();
8152              }
8153              bitField0_ = (bitField0_ & ~0x00000020);
8154              return this;
8155            }
8156            /**
8157             * <code>optional .hadoop.hdfs.fsimage.INodeSection.XAttrFeatureProto xAttrs = 6;</code>
8158             */
8159            public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto.Builder getXAttrsBuilder() {
8160              bitField0_ |= 0x00000020;
8161              onChanged();
8162              return getXAttrsFieldBuilder().getBuilder();
8163            }
8164            /**
8165             * <code>optional .hadoop.hdfs.fsimage.INodeSection.XAttrFeatureProto xAttrs = 6;</code>
8166             */
8167            public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProtoOrBuilder getXAttrsOrBuilder() {
8168              if (xAttrsBuilder_ != null) {
8169                return xAttrsBuilder_.getMessageOrBuilder();
8170              } else {
8171                return xAttrs_;
8172              }
8173            }
8174            /**
8175             * <code>optional .hadoop.hdfs.fsimage.INodeSection.XAttrFeatureProto xAttrs = 6;</code>
8176             */
8177            private com.google.protobuf.SingleFieldBuilder<
8178                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProtoOrBuilder> 
8179                getXAttrsFieldBuilder() {
8180              if (xAttrsBuilder_ == null) {
8181                xAttrsBuilder_ = new com.google.protobuf.SingleFieldBuilder<
8182                    org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProtoOrBuilder>(
8183                        xAttrs_,
8184                        getParentForChildren(),
8185                        isClean());
8186                xAttrs_ = null;
8187              }
8188              return xAttrsBuilder_;
8189            }
8190    
8191            // @@protoc_insertion_point(builder_scope:hadoop.hdfs.fsimage.INodeSection.INodeDirectory)
8192          }
8193    
      static {
        // Eagerly create the shared default instance at class-load time and
        // populate its fields with their default values.
        defaultInstance = new INodeDirectory(true);
        defaultInstance.initFields();
      }
8198    
8199          // @@protoc_insertion_point(class_scope:hadoop.hdfs.fsimage.INodeSection.INodeDirectory)
8200        }
8201    
    /**
     * Read-only accessor contract shared by
     * {@code hadoop.hdfs.fsimage.INodeSection.INodeSymlink} messages and their
     * builders: a has/get pair for each of the four optional fields.
     */
    public interface INodeSymlinkOrBuilder
        extends com.google.protobuf.MessageOrBuilder {

      // optional fixed64 permission = 1;
      /**
       * <code>optional fixed64 permission = 1;</code>
       */
      boolean hasPermission();
      /**
       * <code>optional fixed64 permission = 1;</code>
       */
      long getPermission();

      // optional bytes target = 2;
      /**
       * <code>optional bytes target = 2;</code>
       */
      boolean hasTarget();
      /**
       * <code>optional bytes target = 2;</code>
       */
      com.google.protobuf.ByteString getTarget();

      // optional uint64 modificationTime = 3;
      /**
       * <code>optional uint64 modificationTime = 3;</code>
       */
      boolean hasModificationTime();
      /**
       * <code>optional uint64 modificationTime = 3;</code>
       */
      long getModificationTime();

      // optional uint64 accessTime = 4;
      /**
       * <code>optional uint64 accessTime = 4;</code>
       */
      boolean hasAccessTime();
      /**
       * <code>optional uint64 accessTime = 4;</code>
       */
      long getAccessTime();
    }
8245        /**
8246         * Protobuf type {@code hadoop.hdfs.fsimage.INodeSection.INodeSymlink}
8247         */
8248        public static final class INodeSymlink extends
8249            com.google.protobuf.GeneratedMessage
8250            implements INodeSymlinkOrBuilder {
8251          // Use INodeSymlink.newBuilder() to construct.
8252          private INodeSymlink(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
8253            super(builder);
8254            this.unknownFields = builder.getUnknownFields();
8255          }
8256          private INodeSymlink(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
8257    
8258          private static final INodeSymlink defaultInstance;
8259          public static INodeSymlink getDefaultInstance() {
8260            return defaultInstance;
8261          }
8262    
8263          public INodeSymlink getDefaultInstanceForType() {
8264            return defaultInstance;
8265          }
8266    
8267          private final com.google.protobuf.UnknownFieldSet unknownFields;
8268          @java.lang.Override
8269          public final com.google.protobuf.UnknownFieldSet
8270              getUnknownFields() {
8271            return this.unknownFields;
8272          }
8273          private INodeSymlink(
8274              com.google.protobuf.CodedInputStream input,
8275              com.google.protobuf.ExtensionRegistryLite extensionRegistry)
8276              throws com.google.protobuf.InvalidProtocolBufferException {
8277            initFields();
8278            int mutable_bitField0_ = 0;
8279            com.google.protobuf.UnknownFieldSet.Builder unknownFields =
8280                com.google.protobuf.UnknownFieldSet.newBuilder();
8281            try {
8282              boolean done = false;
8283              while (!done) {
8284                int tag = input.readTag();
8285                switch (tag) {
8286                  case 0:
8287                    done = true;
8288                    break;
8289                  default: {
8290                    if (!parseUnknownField(input, unknownFields,
8291                                           extensionRegistry, tag)) {
8292                      done = true;
8293                    }
8294                    break;
8295                  }
8296                  case 9: {
8297                    bitField0_ |= 0x00000001;
8298                    permission_ = input.readFixed64();
8299                    break;
8300                  }
8301                  case 18: {
8302                    bitField0_ |= 0x00000002;
8303                    target_ = input.readBytes();
8304                    break;
8305                  }
8306                  case 24: {
8307                    bitField0_ |= 0x00000004;
8308                    modificationTime_ = input.readUInt64();
8309                    break;
8310                  }
8311                  case 32: {
8312                    bitField0_ |= 0x00000008;
8313                    accessTime_ = input.readUInt64();
8314                    break;
8315                  }
8316                }
8317              }
8318            } catch (com.google.protobuf.InvalidProtocolBufferException e) {
8319              throw e.setUnfinishedMessage(this);
8320            } catch (java.io.IOException e) {
8321              throw new com.google.protobuf.InvalidProtocolBufferException(
8322                  e.getMessage()).setUnfinishedMessage(this);
8323            } finally {
8324              this.unknownFields = unknownFields.build();
8325              makeExtensionsImmutable();
8326            }
8327          }
8328          public static final com.google.protobuf.Descriptors.Descriptor
8329              getDescriptor() {
8330            return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_INodeSymlink_descriptor;
8331          }
8332    
8333          protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
8334              internalGetFieldAccessorTable() {
8335            return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_INodeSymlink_fieldAccessorTable
8336                .ensureFieldAccessorsInitialized(
8337                    org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink.Builder.class);
8338          }
8339    
8340          public static com.google.protobuf.Parser<INodeSymlink> PARSER =
8341              new com.google.protobuf.AbstractParser<INodeSymlink>() {
8342            public INodeSymlink parsePartialFrom(
8343                com.google.protobuf.CodedInputStream input,
8344                com.google.protobuf.ExtensionRegistryLite extensionRegistry)
8345                throws com.google.protobuf.InvalidProtocolBufferException {
8346              return new INodeSymlink(input, extensionRegistry);
8347            }
8348          };
8349    
8350          @java.lang.Override
8351          public com.google.protobuf.Parser<INodeSymlink> getParserForType() {
8352            return PARSER;
8353          }
8354    
8355          private int bitField0_;
8356          // optional fixed64 permission = 1;
8357          public static final int PERMISSION_FIELD_NUMBER = 1;
8358          private long permission_;
8359          /**
8360           * <code>optional fixed64 permission = 1;</code>
8361           */
8362          public boolean hasPermission() {
8363            return ((bitField0_ & 0x00000001) == 0x00000001);
8364          }
8365          /**
8366           * <code>optional fixed64 permission = 1;</code>
8367           */
8368          public long getPermission() {
8369            return permission_;
8370          }
8371    
8372          // optional bytes target = 2;
8373          public static final int TARGET_FIELD_NUMBER = 2;
8374          private com.google.protobuf.ByteString target_;
8375          /**
8376           * <code>optional bytes target = 2;</code>
8377           */
8378          public boolean hasTarget() {
8379            return ((bitField0_ & 0x00000002) == 0x00000002);
8380          }
8381          /**
8382           * <code>optional bytes target = 2;</code>
8383           */
8384          public com.google.protobuf.ByteString getTarget() {
8385            return target_;
8386          }
8387    
8388          // optional uint64 modificationTime = 3;
8389          public static final int MODIFICATIONTIME_FIELD_NUMBER = 3;
8390          private long modificationTime_;
8391          /**
8392           * <code>optional uint64 modificationTime = 3;</code>
8393           */
8394          public boolean hasModificationTime() {
8395            return ((bitField0_ & 0x00000004) == 0x00000004);
8396          }
8397          /**
8398           * <code>optional uint64 modificationTime = 3;</code>
8399           */
8400          public long getModificationTime() {
8401            return modificationTime_;
8402          }
8403    
8404          // optional uint64 accessTime = 4;
8405          public static final int ACCESSTIME_FIELD_NUMBER = 4;
8406          private long accessTime_;
8407          /**
8408           * <code>optional uint64 accessTime = 4;</code>
8409           */
8410          public boolean hasAccessTime() {
8411            return ((bitField0_ & 0x00000008) == 0x00000008);
8412          }
8413          /**
8414           * <code>optional uint64 accessTime = 4;</code>
8415           */
8416          public long getAccessTime() {
8417            return accessTime_;
8418          }
8419    
8420          private void initFields() {
8421            permission_ = 0L;
8422            target_ = com.google.protobuf.ByteString.EMPTY;
8423            modificationTime_ = 0L;
8424            accessTime_ = 0L;
8425          }
8426          private byte memoizedIsInitialized = -1;
8427          public final boolean isInitialized() {
8428            byte isInitialized = memoizedIsInitialized;
8429            if (isInitialized != -1) return isInitialized == 1;
8430    
8431            memoizedIsInitialized = 1;
8432            return true;
8433          }
8434    
8435          public void writeTo(com.google.protobuf.CodedOutputStream output)
8436                              throws java.io.IOException {
8437            getSerializedSize();
8438            if (((bitField0_ & 0x00000001) == 0x00000001)) {
8439              output.writeFixed64(1, permission_);
8440            }
8441            if (((bitField0_ & 0x00000002) == 0x00000002)) {
8442              output.writeBytes(2, target_);
8443            }
8444            if (((bitField0_ & 0x00000004) == 0x00000004)) {
8445              output.writeUInt64(3, modificationTime_);
8446            }
8447            if (((bitField0_ & 0x00000008) == 0x00000008)) {
8448              output.writeUInt64(4, accessTime_);
8449            }
8450            getUnknownFields().writeTo(output);
8451          }
8452    
8453          private int memoizedSerializedSize = -1;
8454          public int getSerializedSize() {
8455            int size = memoizedSerializedSize;
8456            if (size != -1) return size;
8457    
8458            size = 0;
8459            if (((bitField0_ & 0x00000001) == 0x00000001)) {
8460              size += com.google.protobuf.CodedOutputStream
8461                .computeFixed64Size(1, permission_);
8462            }
8463            if (((bitField0_ & 0x00000002) == 0x00000002)) {
8464              size += com.google.protobuf.CodedOutputStream
8465                .computeBytesSize(2, target_);
8466            }
8467            if (((bitField0_ & 0x00000004) == 0x00000004)) {
8468              size += com.google.protobuf.CodedOutputStream
8469                .computeUInt64Size(3, modificationTime_);
8470            }
8471            if (((bitField0_ & 0x00000008) == 0x00000008)) {
8472              size += com.google.protobuf.CodedOutputStream
8473                .computeUInt64Size(4, accessTime_);
8474            }
8475            size += getUnknownFields().getSerializedSize();
8476            memoizedSerializedSize = size;
8477            return size;
8478          }
8479    
8480          private static final long serialVersionUID = 0L;
8481          @java.lang.Override
8482          protected java.lang.Object writeReplace()
8483              throws java.io.ObjectStreamException {
8484            return super.writeReplace();
8485          }
8486    
8487          public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink parseFrom(
8488              com.google.protobuf.ByteString data)
8489              throws com.google.protobuf.InvalidProtocolBufferException {
8490            return PARSER.parseFrom(data);
8491          }
8492          public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink parseFrom(
8493              com.google.protobuf.ByteString data,
8494              com.google.protobuf.ExtensionRegistryLite extensionRegistry)
8495              throws com.google.protobuf.InvalidProtocolBufferException {
8496            return PARSER.parseFrom(data, extensionRegistry);
8497          }
8498          public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink parseFrom(byte[] data)
8499              throws com.google.protobuf.InvalidProtocolBufferException {
8500            return PARSER.parseFrom(data);
8501          }
8502          public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink parseFrom(
8503              byte[] data,
8504              com.google.protobuf.ExtensionRegistryLite extensionRegistry)
8505              throws com.google.protobuf.InvalidProtocolBufferException {
8506            return PARSER.parseFrom(data, extensionRegistry);
8507          }
8508          public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink parseFrom(java.io.InputStream input)
8509              throws java.io.IOException {
8510            return PARSER.parseFrom(input);
8511          }
8512          public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink parseFrom(
8513              java.io.InputStream input,
8514              com.google.protobuf.ExtensionRegistryLite extensionRegistry)
8515              throws java.io.IOException {
8516            return PARSER.parseFrom(input, extensionRegistry);
8517          }
8518          public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink parseDelimitedFrom(java.io.InputStream input)
8519              throws java.io.IOException {
8520            return PARSER.parseDelimitedFrom(input);
8521          }
8522          public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink parseDelimitedFrom(
8523              java.io.InputStream input,
8524              com.google.protobuf.ExtensionRegistryLite extensionRegistry)
8525              throws java.io.IOException {
8526            return PARSER.parseDelimitedFrom(input, extensionRegistry);
8527          }
8528          public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink parseFrom(
8529              com.google.protobuf.CodedInputStream input)
8530              throws java.io.IOException {
8531            return PARSER.parseFrom(input);
8532          }
8533          public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink parseFrom(
8534              com.google.protobuf.CodedInputStream input,
8535              com.google.protobuf.ExtensionRegistryLite extensionRegistry)
8536              throws java.io.IOException {
8537            return PARSER.parseFrom(input, extensionRegistry);
8538          }
8539    
8540          public static Builder newBuilder() { return Builder.create(); }
8541          public Builder newBuilderForType() { return newBuilder(); }
8542          public static Builder newBuilder(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink prototype) {
8543            return newBuilder().mergeFrom(prototype);
8544          }
8545          public Builder toBuilder() { return newBuilder(this); }
8546    
8547          @java.lang.Override
8548          protected Builder newBuilderForType(
8549              com.google.protobuf.GeneratedMessage.BuilderParent parent) {
8550            Builder builder = new Builder(parent);
8551            return builder;
8552          }
8553          /**
8554           * Protobuf type {@code hadoop.hdfs.fsimage.INodeSection.INodeSymlink}
8555           */
8556          public static final class Builder extends
8557              com.google.protobuf.GeneratedMessage.Builder<Builder>
8558             implements org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlinkOrBuilder {
8559            public static final com.google.protobuf.Descriptors.Descriptor
8560                getDescriptor() {
8561              return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_INodeSymlink_descriptor;
8562            }
8563    
8564            protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
8565                internalGetFieldAccessorTable() {
8566              return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_INodeSymlink_fieldAccessorTable
8567                  .ensureFieldAccessorsInitialized(
8568                      org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink.Builder.class);
8569            }
8570    
8571            // Construct using org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink.newBuilder()
8572            private Builder() {
8573              maybeForceBuilderInitialization();
8574            }
8575    
8576            private Builder(
8577                com.google.protobuf.GeneratedMessage.BuilderParent parent) {
8578              super(parent);
8579              maybeForceBuilderInitialization();
8580            }
8581            private void maybeForceBuilderInitialization() {
8582              if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
8583              }
8584            }
8585            private static Builder create() {
8586              return new Builder();
8587            }
8588    
8589            public Builder clear() {
8590              super.clear();
8591              permission_ = 0L;
8592              bitField0_ = (bitField0_ & ~0x00000001);
8593              target_ = com.google.protobuf.ByteString.EMPTY;
8594              bitField0_ = (bitField0_ & ~0x00000002);
8595              modificationTime_ = 0L;
8596              bitField0_ = (bitField0_ & ~0x00000004);
8597              accessTime_ = 0L;
8598              bitField0_ = (bitField0_ & ~0x00000008);
8599              return this;
8600            }
8601    
8602            public Builder clone() {
8603              return create().mergeFrom(buildPartial());
8604            }
8605    
8606            public com.google.protobuf.Descriptors.Descriptor
8607                getDescriptorForType() {
8608              return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_INodeSymlink_descriptor;
8609            }
8610    
8611            public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink getDefaultInstanceForType() {
8612              return org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink.getDefaultInstance();
8613            }
8614    
8615            public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink build() {
8616              org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink result = buildPartial();
8617              if (!result.isInitialized()) {
8618                throw newUninitializedMessageException(result);
8619              }
8620              return result;
8621            }
8622    
8623            public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink buildPartial() {
8624              org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink result = new org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink(this);
8625              int from_bitField0_ = bitField0_;
8626              int to_bitField0_ = 0;
8627              if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
8628                to_bitField0_ |= 0x00000001;
8629              }
8630              result.permission_ = permission_;
8631              if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
8632                to_bitField0_ |= 0x00000002;
8633              }
8634              result.target_ = target_;
8635              if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
8636                to_bitField0_ |= 0x00000004;
8637              }
8638              result.modificationTime_ = modificationTime_;
8639              if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
8640                to_bitField0_ |= 0x00000008;
8641              }
8642              result.accessTime_ = accessTime_;
8643              result.bitField0_ = to_bitField0_;
8644              onBuilt();
8645              return result;
8646            }
8647    
8648            public Builder mergeFrom(com.google.protobuf.Message other) {
8649              if (other instanceof org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink) {
8650                return mergeFrom((org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink)other);
8651              } else {
8652                super.mergeFrom(other);
8653                return this;
8654              }
8655            }
8656    
8657            public Builder mergeFrom(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink other) {
8658              if (other == org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink.getDefaultInstance()) return this;
8659              if (other.hasPermission()) {
8660                setPermission(other.getPermission());
8661              }
8662              if (other.hasTarget()) {
8663                setTarget(other.getTarget());
8664              }
8665              if (other.hasModificationTime()) {
8666                setModificationTime(other.getModificationTime());
8667              }
8668              if (other.hasAccessTime()) {
8669                setAccessTime(other.getAccessTime());
8670              }
8671              this.mergeUnknownFields(other.getUnknownFields());
8672              return this;
8673            }
8674    
8675            public final boolean isInitialized() {
8676              return true;
8677            }
8678    
8679            public Builder mergeFrom(
8680                com.google.protobuf.CodedInputStream input,
8681                com.google.protobuf.ExtensionRegistryLite extensionRegistry)
8682                throws java.io.IOException {
8683              org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink parsedMessage = null;
8684              try {
8685                parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
8686              } catch (com.google.protobuf.InvalidProtocolBufferException e) {
8687                parsedMessage = (org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink) e.getUnfinishedMessage();
8688                throw e;
8689              } finally {
8690                if (parsedMessage != null) {
8691                  mergeFrom(parsedMessage);
8692                }
8693              }
8694              return this;
8695            }
8696            private int bitField0_;
8697    
8698            // optional fixed64 permission = 1;
8699            private long permission_ ;
8700            /**
8701             * <code>optional fixed64 permission = 1;</code>
8702             */
8703            public boolean hasPermission() {
8704              return ((bitField0_ & 0x00000001) == 0x00000001);
8705            }
8706            /**
8707             * <code>optional fixed64 permission = 1;</code>
8708             */
8709            public long getPermission() {
8710              return permission_;
8711            }
8712            /**
8713             * <code>optional fixed64 permission = 1;</code>
8714             */
8715            public Builder setPermission(long value) {
8716              bitField0_ |= 0x00000001;
8717              permission_ = value;
8718              onChanged();
8719              return this;
8720            }
8721            /**
8722             * <code>optional fixed64 permission = 1;</code>
8723             */
8724            public Builder clearPermission() {
8725              bitField0_ = (bitField0_ & ~0x00000001);
8726              permission_ = 0L;
8727              onChanged();
8728              return this;
8729            }
8730    
8731            // optional bytes target = 2;
8732            private com.google.protobuf.ByteString target_ = com.google.protobuf.ByteString.EMPTY;
8733            /**
8734             * <code>optional bytes target = 2;</code>
8735             */
8736            public boolean hasTarget() {
8737              return ((bitField0_ & 0x00000002) == 0x00000002);
8738            }
8739            /**
8740             * <code>optional bytes target = 2;</code>
8741             */
8742            public com.google.protobuf.ByteString getTarget() {
8743              return target_;
8744            }
8745            /**
8746             * <code>optional bytes target = 2;</code>
8747             */
8748            public Builder setTarget(com.google.protobuf.ByteString value) {
8749              if (value == null) {
8750        throw new NullPointerException();
8751      }
8752      bitField0_ |= 0x00000002;
8753              target_ = value;
8754              onChanged();
8755              return this;
8756            }
8757            /**
8758             * <code>optional bytes target = 2;</code>
8759             */
8760            public Builder clearTarget() {
8761              bitField0_ = (bitField0_ & ~0x00000002);
8762              target_ = getDefaultInstance().getTarget();
8763              onChanged();
8764              return this;
8765            }
8766    
8767            // optional uint64 modificationTime = 3;
8768            private long modificationTime_ ;
8769            /**
8770             * <code>optional uint64 modificationTime = 3;</code>
8771             */
8772            public boolean hasModificationTime() {
8773              return ((bitField0_ & 0x00000004) == 0x00000004);
8774            }
8775            /**
8776             * <code>optional uint64 modificationTime = 3;</code>
8777             */
8778            public long getModificationTime() {
8779              return modificationTime_;
8780            }
8781            /**
8782             * <code>optional uint64 modificationTime = 3;</code>
8783             */
8784            public Builder setModificationTime(long value) {
8785              bitField0_ |= 0x00000004;
8786              modificationTime_ = value;
8787              onChanged();
8788              return this;
8789            }
8790            /**
8791             * <code>optional uint64 modificationTime = 3;</code>
8792             */
8793            public Builder clearModificationTime() {
8794              bitField0_ = (bitField0_ & ~0x00000004);
8795              modificationTime_ = 0L;
8796              onChanged();
8797              return this;
8798            }
8799    
8800            // optional uint64 accessTime = 4;
8801            private long accessTime_ ;
8802            /**
8803             * <code>optional uint64 accessTime = 4;</code>
8804             */
8805            public boolean hasAccessTime() {
8806              return ((bitField0_ & 0x00000008) == 0x00000008);
8807            }
8808            /**
8809             * <code>optional uint64 accessTime = 4;</code>
8810             */
8811            public long getAccessTime() {
8812              return accessTime_;
8813            }
8814            /**
8815             * <code>optional uint64 accessTime = 4;</code>
8816             */
8817            public Builder setAccessTime(long value) {
8818              bitField0_ |= 0x00000008;
8819              accessTime_ = value;
8820              onChanged();
8821              return this;
8822            }
8823            /**
8824             * <code>optional uint64 accessTime = 4;</code>
8825             */
8826            public Builder clearAccessTime() {
8827              bitField0_ = (bitField0_ & ~0x00000008);
8828              accessTime_ = 0L;
8829              onChanged();
8830              return this;
8831            }
8832    
8833            // @@protoc_insertion_point(builder_scope:hadoop.hdfs.fsimage.INodeSection.INodeSymlink)
8834          }
8835    
8836          static {
8837            defaultInstance = new INodeSymlink(true);
8838            defaultInstance.initFields();
8839          }
8840    
8841          // @@protoc_insertion_point(class_scope:hadoop.hdfs.fsimage.INodeSection.INodeSymlink)
8842        }
8843    
    /**
     * Accessor contract for the generated {@code INodeSection.INode} message:
     * presence checks and getters for type (required enum, field 1),
     * id (required uint64, field 2), name (optional bytes, field 3) and the
     * per-kind optional sub-messages file (4), directory (5) and symlink (6).
     * Implemented by both the immutable message and its Builder.
     */
    public interface INodeOrBuilder
        extends com.google.protobuf.MessageOrBuilder {

      // required .hadoop.hdfs.fsimage.INodeSection.INode.Type type = 1;
      /**
       * <code>required .hadoop.hdfs.fsimage.INodeSection.INode.Type type = 1;</code>
       */
      boolean hasType();
      /**
       * <code>required .hadoop.hdfs.fsimage.INodeSection.INode.Type type = 1;</code>
       */
      org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode.Type getType();

      // required uint64 id = 2;
      /**
       * <code>required uint64 id = 2;</code>
       */
      boolean hasId();
      /**
       * <code>required uint64 id = 2;</code>
       */
      long getId();

      // optional bytes name = 3;
      /**
       * <code>optional bytes name = 3;</code>
       */
      boolean hasName();
      /**
       * <code>optional bytes name = 3;</code>
       */
      com.google.protobuf.ByteString getName();

      // optional .hadoop.hdfs.fsimage.INodeSection.INodeFile file = 4;
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeFile file = 4;</code>
       */
      boolean hasFile();
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeFile file = 4;</code>
       */
      org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile getFile();
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeFile file = 4;</code>
       */
      org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFileOrBuilder getFileOrBuilder();

      // optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory directory = 5;
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory directory = 5;</code>
       */
      boolean hasDirectory();
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory directory = 5;</code>
       */
      org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory getDirectory();
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory directory = 5;</code>
       */
      org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectoryOrBuilder getDirectoryOrBuilder();

      // optional .hadoop.hdfs.fsimage.INodeSection.INodeSymlink symlink = 6;
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeSymlink symlink = 6;</code>
       */
      boolean hasSymlink();
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeSymlink symlink = 6;</code>
       */
      org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink getSymlink();
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeSymlink symlink = 6;</code>
       */
      org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlinkOrBuilder getSymlinkOrBuilder();
    }
8919        /**
8920         * Protobuf type {@code hadoop.hdfs.fsimage.INodeSection.INode}
8921         */
8922        public static final class INode extends
8923            com.google.protobuf.GeneratedMessage
8924            implements INodeOrBuilder {
8925          // Use INode.newBuilder() to construct.
8926          private INode(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
8927            super(builder);
8928            this.unknownFields = builder.getUnknownFields();
8929          }
8930          private INode(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
8931    
8932          private static final INode defaultInstance;
8933          public static INode getDefaultInstance() {
8934            return defaultInstance;
8935          }
8936    
8937          public INode getDefaultInstanceForType() {
8938            return defaultInstance;
8939          }
8940    
8941          private final com.google.protobuf.UnknownFieldSet unknownFields;
8942          @java.lang.Override
8943          public final com.google.protobuf.UnknownFieldSet
8944              getUnknownFields() {
8945            return this.unknownFields;
8946          }
      // Wire-format parsing constructor; invoked only via PARSER.
      // Reads tag/value pairs until end of input (tag 0) or an unparseable field.
      private INode(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        initFields();
        // Unused in this message (generated for repeated fields; INode has none).
        int mutable_bitField0_ = 0;
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
            com.google.protobuf.UnknownFieldSet.newBuilder();
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            // NOTE: placing `default` before the field cases is legal Java and
            // does not affect dispatch; it is the generator's convention.
            switch (tag) {
              case 0:
                done = true;
                break;
              default: {
                if (!parseUnknownField(input, unknownFields,
                                       extensionRegistry, tag)) {
                  done = true;
                }
                break;
              }
              case 8: {
                int rawValue = input.readEnum();
                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode.Type value = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode.Type.valueOf(rawValue);
                if (value == null) {
                  // Unrecognized enum number: keep it in unknown fields
                  // instead of dropping it.
                  unknownFields.mergeVarintField(1, rawValue);
                } else {
                  bitField0_ |= 0x00000001;
                  type_ = value;
                }
                break;
              }
              case 16: {
                bitField0_ |= 0x00000002;
                id_ = input.readUInt64();
                break;
              }
              case 26: {
                bitField0_ |= 0x00000004;
                name_ = input.readBytes();
                break;
              }
              case 34: {
                // If `file` was already seen, merge the new occurrence into
                // the previously parsed value via its builder.
                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile.Builder subBuilder = null;
                if (((bitField0_ & 0x00000008) == 0x00000008)) {
                  subBuilder = file_.toBuilder();
                }
                file_ = input.readMessage(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile.PARSER, extensionRegistry);
                if (subBuilder != null) {
                  subBuilder.mergeFrom(file_);
                  file_ = subBuilder.buildPartial();
                }
                bitField0_ |= 0x00000008;
                break;
              }
              case 42: {
                // Same merge-on-repeat handling for `directory`.
                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory.Builder subBuilder = null;
                if (((bitField0_ & 0x00000010) == 0x00000010)) {
                  subBuilder = directory_.toBuilder();
                }
                directory_ = input.readMessage(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory.PARSER, extensionRegistry);
                if (subBuilder != null) {
                  subBuilder.mergeFrom(directory_);
                  directory_ = subBuilder.buildPartial();
                }
                bitField0_ |= 0x00000010;
                break;
              }
              case 50: {
                // Same merge-on-repeat handling for `symlink`.
                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink.Builder subBuilder = null;
                if (((bitField0_ & 0x00000020) == 0x00000020)) {
                  subBuilder = symlink_.toBuilder();
                }
                symlink_ = input.readMessage(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink.PARSER, extensionRegistry);
                if (subBuilder != null) {
                  subBuilder.mergeFrom(symlink_);
                  symlink_ = subBuilder.buildPartial();
                }
                bitField0_ |= 0x00000020;
                break;
              }
            }
          }
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(this);
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(
              e.getMessage()).setUnfinishedMessage(this);
        } finally {
          // Always freeze whatever was parsed so far, even when rethrowing.
          this.unknownFields = unknownFields.build();
          makeExtensionsImmutable();
        }
      }
      // Descriptor for hadoop.hdfs.fsimage.INodeSection.INode, declared on the
      // enclosing FsImageProto outer class.
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_INode_descriptor;
      }

      // Binds this class and its Builder to the reflective field accessors
      // generated for fsimage.proto.
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_INode_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode.Builder.class);
      }
9053    
      // Public, non-final static field by generator convention; delegates to
      // the wire-parsing constructor above.
      public static com.google.protobuf.Parser<INode> PARSER =
          new com.google.protobuf.AbstractParser<INode>() {
        public INode parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          return new INode(input, extensionRegistry);
        }
      };

      @java.lang.Override
      public com.google.protobuf.Parser<INode> getParserForType() {
        return PARSER;
      }
9068    
9069          /**
9070           * Protobuf enum {@code hadoop.hdfs.fsimage.INodeSection.INode.Type}
9071           */
9072          public enum Type
9073              implements com.google.protobuf.ProtocolMessageEnum {
9074            /**
9075             * <code>FILE = 1;</code>
9076             */
9077            FILE(0, 1),
9078            /**
9079             * <code>DIRECTORY = 2;</code>
9080             */
9081            DIRECTORY(1, 2),
9082            /**
9083             * <code>SYMLINK = 3;</code>
9084             */
9085            SYMLINK(2, 3),
9086            ;
9087    
9088            /**
9089             * <code>FILE = 1;</code>
9090             */
9091            public static final int FILE_VALUE = 1;
9092            /**
9093             * <code>DIRECTORY = 2;</code>
9094             */
9095            public static final int DIRECTORY_VALUE = 2;
9096            /**
9097             * <code>SYMLINK = 3;</code>
9098             */
9099            public static final int SYMLINK_VALUE = 3;
9100    
9101    
9102            public final int getNumber() { return value; }
9103    
9104            public static Type valueOf(int value) {
9105              switch (value) {
9106                case 1: return FILE;
9107                case 2: return DIRECTORY;
9108                case 3: return SYMLINK;
9109                default: return null;
9110              }
9111            }
9112    
9113            public static com.google.protobuf.Internal.EnumLiteMap<Type>
9114                internalGetValueMap() {
9115              return internalValueMap;
9116            }
9117            private static com.google.protobuf.Internal.EnumLiteMap<Type>
9118                internalValueMap =
9119                  new com.google.protobuf.Internal.EnumLiteMap<Type>() {
9120                    public Type findValueByNumber(int number) {
9121                      return Type.valueOf(number);
9122                    }
9123                  };
9124    
9125            public final com.google.protobuf.Descriptors.EnumValueDescriptor
9126                getValueDescriptor() {
9127              return getDescriptor().getValues().get(index);
9128            }
9129            public final com.google.protobuf.Descriptors.EnumDescriptor
9130                getDescriptorForType() {
9131              return getDescriptor();
9132            }
9133            public static final com.google.protobuf.Descriptors.EnumDescriptor
9134                getDescriptor() {
9135              return org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode.getDescriptor().getEnumTypes().get(0);
9136            }
9137    
9138            private static final Type[] VALUES = values();
9139    
9140            public static Type valueOf(
9141                com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
9142              if (desc.getType() != getDescriptor()) {
9143                throw new java.lang.IllegalArgumentException(
9144                  "EnumValueDescriptor is not for this type.");
9145              }
9146              return VALUES[desc.getIndex()];
9147            }
9148    
9149            private final int index;
9150            private final int value;
9151    
9152            private Type(int index, int value) {
9153              this.index = index;
9154              this.value = value;
9155            }
9156    
9157            // @@protoc_insertion_point(enum_scope:hadoop.hdfs.fsimage.INodeSection.INode.Type)
9158          }
9159    
      // Has-bits: one bit per field, in declaration order (0x1=type .. 0x20=symlink).
      private int bitField0_;
      // required .hadoop.hdfs.fsimage.INodeSection.INode.Type type = 1;
      public static final int TYPE_FIELD_NUMBER = 1;
      private org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode.Type type_;
      /**
       * <code>required .hadoop.hdfs.fsimage.INodeSection.INode.Type type = 1;</code>
       */
      public boolean hasType() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>required .hadoop.hdfs.fsimage.INodeSection.INode.Type type = 1;</code>
       *
       * <p>Returns {@code Type.FILE} when unset (see {@code initFields()}).
       */
      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode.Type getType() {
        return type_;
      }
9176    
      // required uint64 id = 2;
      public static final int ID_FIELD_NUMBER = 2;
      private long id_;
      /**
       * <code>required uint64 id = 2;</code>
       */
      public boolean hasId() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>required uint64 id = 2;</code>
       *
       * <p>uint64 is carried in a Java {@code long}; returns 0 when unset.
       */
      public long getId() {
        return id_;
      }
9192    
      // optional bytes name = 3;
      public static final int NAME_FIELD_NUMBER = 3;
      private com.google.protobuf.ByteString name_;
      /**
       * <code>optional bytes name = 3;</code>
       */
      public boolean hasName() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      /**
       * <code>optional bytes name = 3;</code>
       *
       * <p>Returns {@code ByteString.EMPTY} when unset (see {@code initFields()}).
       */
      public com.google.protobuf.ByteString getName() {
        return name_;
      }
9208    
      // optional .hadoop.hdfs.fsimage.INodeSection.INodeFile file = 4;
      public static final int FILE_FIELD_NUMBER = 4;
      private org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile file_;
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeFile file = 4;</code>
       */
      public boolean hasFile() {
        return ((bitField0_ & 0x00000008) == 0x00000008);
      }
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeFile file = 4;</code>
       *
       * <p>Returns the INodeFile default instance (never null) when unset.
       */
      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile getFile() {
        return file_;
      }
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeFile file = 4;</code>
       */
      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFileOrBuilder getFileOrBuilder() {
        return file_;
      }
9230    
      // optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory directory = 5;
      public static final int DIRECTORY_FIELD_NUMBER = 5;
      private org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory directory_;
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory directory = 5;</code>
       */
      public boolean hasDirectory() {
        return ((bitField0_ & 0x00000010) == 0x00000010);
      }
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory directory = 5;</code>
       *
       * <p>Returns the INodeDirectory default instance (never null) when unset.
       */
      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory getDirectory() {
        return directory_;
      }
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory directory = 5;</code>
       */
      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectoryOrBuilder getDirectoryOrBuilder() {
        return directory_;
      }
9252    
      // optional .hadoop.hdfs.fsimage.INodeSection.INodeSymlink symlink = 6;
      public static final int SYMLINK_FIELD_NUMBER = 6;
      private org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink symlink_;
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeSymlink symlink = 6;</code>
       */
      public boolean hasSymlink() {
        return ((bitField0_ & 0x00000020) == 0x00000020);
      }
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeSymlink symlink = 6;</code>
       *
       * <p>Returns the INodeSymlink default instance (never null) when unset.
       */
      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink getSymlink() {
        return symlink_;
      }
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeSymlink symlink = 6;</code>
       */
      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlinkOrBuilder getSymlinkOrBuilder() {
        return symlink_;
      }
9274    
      // Sets every field to its proto default so getters never return null;
      // called by both the parsing constructor and static initialization.
      private void initFields() {
        type_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode.Type.FILE;
        id_ = 0L;
        name_ = com.google.protobuf.ByteString.EMPTY;
        file_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile.getDefaultInstance();
        directory_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory.getDefaultInstance();
        symlink_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink.getDefaultInstance();
      }
9283          private byte memoizedIsInitialized = -1;
9284          public final boolean isInitialized() {
9285            byte isInitialized = memoizedIsInitialized;
9286            if (isInitialized != -1) return isInitialized == 1;
9287    
9288            if (!hasType()) {
9289              memoizedIsInitialized = 0;
9290              return false;
9291            }
9292            if (!hasId()) {
9293              memoizedIsInitialized = 0;
9294              return false;
9295            }
9296            if (hasFile()) {
9297              if (!getFile().isInitialized()) {
9298                memoizedIsInitialized = 0;
9299                return false;
9300              }
9301            }
9302            if (hasDirectory()) {
9303              if (!getDirectory().isInitialized()) {
9304                memoizedIsInitialized = 0;
9305                return false;
9306              }
9307            }
9308            memoizedIsInitialized = 1;
9309            return true;
9310          }
9311    
      // Serializes set fields in ascending field-number order (1..6), then
      // appends unknown fields.
      public void writeTo(com.google.protobuf.CodedOutputStream output)
                          throws java.io.IOException {
        // Called for its side effect; presumably populates memoized sizes
        // needed for length-delimited sub-messages — generator convention.
        getSerializedSize();
        if (((bitField0_ & 0x00000001) == 0x00000001)) {
          output.writeEnum(1, type_.getNumber());
        }
        if (((bitField0_ & 0x00000002) == 0x00000002)) {
          output.writeUInt64(2, id_);
        }
        if (((bitField0_ & 0x00000004) == 0x00000004)) {
          output.writeBytes(3, name_);
        }
        if (((bitField0_ & 0x00000008) == 0x00000008)) {
          output.writeMessage(4, file_);
        }
        if (((bitField0_ & 0x00000010) == 0x00000010)) {
          output.writeMessage(5, directory_);
        }
        if (((bitField0_ & 0x00000020) == 0x00000020)) {
          output.writeMessage(6, symlink_);
        }
        getUnknownFields().writeTo(output);
      }
9335    
      // Memoized wire size; -1 means not yet computed.
      private int memoizedSerializedSize = -1;
      // Sums the encoded size of each set field plus unknown fields;
      // must mirror writeTo() field-for-field.
      public int getSerializedSize() {
        int size = memoizedSerializedSize;
        if (size != -1) return size;

        size = 0;
        if (((bitField0_ & 0x00000001) == 0x00000001)) {
          size += com.google.protobuf.CodedOutputStream
            .computeEnumSize(1, type_.getNumber());
        }
        if (((bitField0_ & 0x00000002) == 0x00000002)) {
          size += com.google.protobuf.CodedOutputStream
            .computeUInt64Size(2, id_);
        }
        if (((bitField0_ & 0x00000004) == 0x00000004)) {
          size += com.google.protobuf.CodedOutputStream
            .computeBytesSize(3, name_);
        }
        if (((bitField0_ & 0x00000008) == 0x00000008)) {
          size += com.google.protobuf.CodedOutputStream
            .computeMessageSize(4, file_);
        }
        if (((bitField0_ & 0x00000010) == 0x00000010)) {
          size += com.google.protobuf.CodedOutputStream
            .computeMessageSize(5, directory_);
        }
        if (((bitField0_ & 0x00000020) == 0x00000020)) {
          size += com.google.protobuf.CodedOutputStream
            .computeMessageSize(6, symlink_);
        }
        size += getUnknownFields().getSerializedSize();
        memoizedSerializedSize = size;
        return size;
      }
9370    
      private static final long serialVersionUID = 0L;
      // Java-serialization hook; delegates to GeneratedMessage's replacement
      // object rather than serializing this class's fields directly.
      @java.lang.Override
      protected java.lang.Object writeReplace()
          throws java.io.ObjectStreamException {
        return super.writeReplace();
      }
9377    
      // Static parsing conveniences; every overload delegates to PARSER.
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode parseFrom(
          com.google.protobuf.ByteString data)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode parseFrom(
          com.google.protobuf.ByteString data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode parseFrom(byte[] data)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode parseFrom(
          byte[] data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode parseFrom(java.io.InputStream input)
          throws java.io.IOException {
        return PARSER.parseFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode parseFrom(
          java.io.InputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseFrom(input, extensionRegistry);
      }
      // Delimited variants read a varint length prefix before the message body.
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode parseDelimitedFrom(java.io.InputStream input)
          throws java.io.IOException {
        return PARSER.parseDelimitedFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode parseDelimitedFrom(
          java.io.InputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseDelimitedFrom(input, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode parseFrom(
          com.google.protobuf.CodedInputStream input)
          throws java.io.IOException {
        return PARSER.parseFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode parseFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseFrom(input, extensionRegistry);
      }
9430    
      // Builder factories: fresh builder, builder seeded from a prototype,
      // and a builder pre-populated from this instance.
      public static Builder newBuilder() { return Builder.create(); }
      public Builder newBuilderForType() { return newBuilder(); }
      public static Builder newBuilder(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode prototype) {
        return newBuilder().mergeFrom(prototype);
      }
      public Builder toBuilder() { return newBuilder(this); }

      // Framework hook: creates a child builder wired to a parent for
      // invalidation callbacks.
      @java.lang.Override
      protected Builder newBuilderForType(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        Builder builder = new Builder(parent);
        return builder;
      }
9444          /**
9445           * Protobuf type {@code hadoop.hdfs.fsimage.INodeSection.INode}
9446           */
9447          public static final class Builder extends
9448              com.google.protobuf.GeneratedMessage.Builder<Builder>
9449             implements org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeOrBuilder {
        // Same descriptor and accessor table as the message class; the Builder
        // shares the INode reflection metadata.
        public static final com.google.protobuf.Descriptors.Descriptor
            getDescriptor() {
          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_INode_descriptor;
        }

        protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
            internalGetFieldAccessorTable() {
          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_INode_fieldAccessorTable
              .ensureFieldAccessorsInitialized(
                  org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode.Builder.class);
        }
9461    
        // Construct using org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode.newBuilder()
        private Builder() {
          maybeForceBuilderInitialization();
        }

        // Parent-wired variant used by newBuilderForType(BuilderParent).
        private Builder(
            com.google.protobuf.GeneratedMessage.BuilderParent parent) {
          super(parent);
          maybeForceBuilderInitialization();
        }
        // Eagerly creates the sub-message field builders when the runtime
        // requests it (alwaysUseFieldBuilders); otherwise they are lazy.
        private void maybeForceBuilderInitialization() {
          if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
            getFileFieldBuilder();
            getDirectoryFieldBuilder();
            getSymlinkFieldBuilder();
          }
        }
        private static Builder create() {
          return new Builder();
        }
9482    
        // Resets every field to its proto default and clears all has-bits.
        // Message fields go through their nested builder when one exists.
        public Builder clear() {
          super.clear();
          type_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode.Type.FILE;
          bitField0_ = (bitField0_ & ~0x00000001);
          id_ = 0L;
          bitField0_ = (bitField0_ & ~0x00000002);
          name_ = com.google.protobuf.ByteString.EMPTY;
          bitField0_ = (bitField0_ & ~0x00000004);
          if (fileBuilder_ == null) {
            file_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile.getDefaultInstance();
          } else {
            fileBuilder_.clear();
          }
          bitField0_ = (bitField0_ & ~0x00000008);
          if (directoryBuilder_ == null) {
            directory_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory.getDefaultInstance();
          } else {
            directoryBuilder_.clear();
          }
          bitField0_ = (bitField0_ & ~0x00000010);
          if (symlinkBuilder_ == null) {
            symlink_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink.getDefaultInstance();
          } else {
            symlinkBuilder_.clear();
          }
          bitField0_ = (bitField0_ & ~0x00000020);
          return this;
        }
9511    
        // Deep copy via a round-trip through buildPartial().
        public Builder clone() {
          return create().mergeFrom(buildPartial());
        }

        public com.google.protobuf.Descriptors.Descriptor
            getDescriptorForType() {
          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_INode_descriptor;
        }

        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode getDefaultInstanceForType() {
          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode.getDefaultInstance();
        }
9524    
9525            public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode build() {
9526              org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode result = buildPartial();
9527              if (!result.isInitialized()) {
9528                throw newUninitializedMessageException(result);
9529              }
9530              return result;
9531            }
9532    
        // Copies builder state into a new message without validating required
        // fields: each builder has-bit is mirrored into the result's bitField0_,
        // and message fields are taken from the nested builder when present.
        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode buildPartial() {
          org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode result = new org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode(this);
          int from_bitField0_ = bitField0_;
          int to_bitField0_ = 0;
          if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
            to_bitField0_ |= 0x00000001;
          }
          result.type_ = type_;
          if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
            to_bitField0_ |= 0x00000002;
          }
          result.id_ = id_;
          if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
            to_bitField0_ |= 0x00000004;
          }
          result.name_ = name_;
          if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
            to_bitField0_ |= 0x00000008;
          }
          if (fileBuilder_ == null) {
            result.file_ = file_;
          } else {
            result.file_ = fileBuilder_.build();
          }
          if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
            to_bitField0_ |= 0x00000010;
          }
          if (directoryBuilder_ == null) {
            result.directory_ = directory_;
          } else {
            result.directory_ = directoryBuilder_.build();
          }
          if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
            to_bitField0_ |= 0x00000020;
          }
          if (symlinkBuilder_ == null) {
            result.symlink_ = symlink_;
          } else {
            result.symlink_ = symlinkBuilder_.build();
          }
          result.bitField0_ = to_bitField0_;
          onBuilt();
          return result;
        }
9577    
9578            public Builder mergeFrom(com.google.protobuf.Message other) {
9579              if (other instanceof org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode) {
9580                return mergeFrom((org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode)other);
9581              } else {
9582                super.mergeFrom(other);
9583                return this;
9584              }
9585            }
9586    
        // Field-wise merge: scalar/enum/bytes fields overwrite, message fields
        // merge recursively; fields unset in `other` are left untouched.
        public Builder mergeFrom(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode other) {
          if (other == org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode.getDefaultInstance()) return this;
          if (other.hasType()) {
            setType(other.getType());
          }
          if (other.hasId()) {
            setId(other.getId());
          }
          if (other.hasName()) {
            setName(other.getName());
          }
          if (other.hasFile()) {
            mergeFile(other.getFile());
          }
          if (other.hasDirectory()) {
            mergeDirectory(other.getDirectory());
          }
          if (other.hasSymlink()) {
            mergeSymlink(other.getSymlink());
          }
          this.mergeUnknownFields(other.getUnknownFields());
          return this;
        }
9610    
9611            public final boolean isInitialized() {
9612              if (!hasType()) {
9613                
9614                return false;
9615              }
9616              if (!hasId()) {
9617                
9618                return false;
9619              }
9620              if (hasFile()) {
9621                if (!getFile().isInitialized()) {
9622                  
9623                  return false;
9624                }
9625              }
9626              if (hasDirectory()) {
9627                if (!getDirectory().isInitialized()) {
9628                  
9629                  return false;
9630                }
9631              }
9632              return true;
9633            }
9634    
        // Parses from a stream and merges the result into this builder.
        // On parse failure, the partially parsed message (attached to the
        // exception) is still merged in before rethrowing.
        public Builder mergeFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws java.io.IOException {
          org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode parsedMessage = null;
          try {
            parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            parsedMessage = (org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode) e.getUnfinishedMessage();
            throw e;
          } finally {
            if (parsedMessage != null) {
              mergeFrom(parsedMessage);
            }
          }
          return this;
        }
9652            private int bitField0_;
9653    
        // required .hadoop.hdfs.fsimage.INodeSection.INode.Type type = 1;
        // Defaults to FILE until explicitly set; presence tracked via bit 0x01.
        private org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode.Type type_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode.Type.FILE;
        /**
         * <code>required .hadoop.hdfs.fsimage.INodeSection.INode.Type type = 1;</code>
         */
        public boolean hasType() {
          return ((bitField0_ & 0x00000001) == 0x00000001);
        }
        /**
         * <code>required .hadoop.hdfs.fsimage.INodeSection.INode.Type type = 1;</code>
         */
        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode.Type getType() {
          return type_;
        }
        /**
         * <code>required .hadoop.hdfs.fsimage.INodeSection.INode.Type type = 1;</code>
         */
        public Builder setType(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode.Type value) {
          // Proto2 enums are null-hostile: reject rather than store null.
          if (value == null) {
            throw new NullPointerException();
          }
          bitField0_ |= 0x00000001;
          type_ = value;
          onChanged();
          return this;
        }
        /**
         * <code>required .hadoop.hdfs.fsimage.INodeSection.INode.Type type = 1;</code>
         */
        public Builder clearType() {
          bitField0_ = (bitField0_ & ~0x00000001);
          type_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode.Type.FILE;
          onChanged();
          return this;
        }
9689    
        // required uint64 id = 2;
        // Inode id; presence tracked via bit 0x02.
        private long id_ ;
        /**
         * <code>required uint64 id = 2;</code>
         */
        public boolean hasId() {
          return ((bitField0_ & 0x00000002) == 0x00000002);
        }
        /**
         * <code>required uint64 id = 2;</code>
         */
        public long getId() {
          return id_;
        }
        /**
         * <code>required uint64 id = 2;</code>
         */
        public Builder setId(long value) {
          bitField0_ |= 0x00000002;
          id_ = value;
          onChanged();
          return this;
        }
        /**
         * <code>required uint64 id = 2;</code>
         */
        public Builder clearId() {
          bitField0_ = (bitField0_ & ~0x00000002);
          id_ = 0L;
          onChanged();
          return this;
        }
9722    
        // optional bytes name = 3;
        // Raw inode name bytes; defaults to the empty ByteString, never null.
        private com.google.protobuf.ByteString name_ = com.google.protobuf.ByteString.EMPTY;
        /**
         * <code>optional bytes name = 3;</code>
         */
        public boolean hasName() {
          return ((bitField0_ & 0x00000004) == 0x00000004);
        }
        /**
         * <code>optional bytes name = 3;</code>
         */
        public com.google.protobuf.ByteString getName() {
          return name_;
        }
9737            /**
9738             * <code>optional bytes name = 3;</code>
9739             */
9740            public Builder setName(com.google.protobuf.ByteString value) {
9741              if (value == null) {
9742        throw new NullPointerException();
9743      }
9744      bitField0_ |= 0x00000004;
9745              name_ = value;
9746              onChanged();
9747              return this;
9748            }
9749            /**
9750             * <code>optional bytes name = 3;</code>
9751             */
9752            public Builder clearName() {
9753              bitField0_ = (bitField0_ & ~0x00000004);
9754              name_ = getDefaultInstance().getName();
9755              onChanged();
9756              return this;
9757            }
9758    
        // optional .hadoop.hdfs.fsimage.INodeSection.INodeFile file = 4;
        // Standard GeneratedMessage single-field plumbing: file_ holds the
        // value until a SingleFieldBuilder is created, after which
        // fileBuilder_ becomes the single source of truth and file_ is nulled.
        private org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile file_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile.getDefaultInstance();
        private com.google.protobuf.SingleFieldBuilder<
            org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFileOrBuilder> fileBuilder_;
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeFile file = 4;</code>
         */
        public boolean hasFile() {
          return ((bitField0_ & 0x00000008) == 0x00000008);
        }
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeFile file = 4;</code>
         */
        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile getFile() {
          if (fileBuilder_ == null) {
            return file_;
          } else {
            return fileBuilder_.getMessage();
          }
        }
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeFile file = 4;</code>
         */
        public Builder setFile(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile value) {
          if (fileBuilder_ == null) {
            if (value == null) {
              throw new NullPointerException();
            }
            file_ = value;
            onChanged();
          } else {
            fileBuilder_.setMessage(value);
          }
          bitField0_ |= 0x00000008;
          return this;
        }
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeFile file = 4;</code>
         */
        public Builder setFile(
            org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile.Builder builderForValue) {
          if (fileBuilder_ == null) {
            file_ = builderForValue.build();
            onChanged();
          } else {
            fileBuilder_.setMessage(builderForValue.build());
          }
          bitField0_ |= 0x00000008;
          return this;
        }
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeFile file = 4;</code>
         */
        public Builder mergeFile(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile value) {
          if (fileBuilder_ == null) {
            // Merge field-by-field only when a non-default value is already
            // present; otherwise adopt 'value' wholesale.
            if (((bitField0_ & 0x00000008) == 0x00000008) &&
                file_ != org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile.getDefaultInstance()) {
              file_ =
                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile.newBuilder(file_).mergeFrom(value).buildPartial();
            } else {
              file_ = value;
            }
            onChanged();
          } else {
            fileBuilder_.mergeFrom(value);
          }
          bitField0_ |= 0x00000008;
          return this;
        }
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeFile file = 4;</code>
         */
        public Builder clearFile() {
          if (fileBuilder_ == null) {
            file_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile.getDefaultInstance();
            onChanged();
          } else {
            fileBuilder_.clear();
          }
          bitField0_ = (bitField0_ & ~0x00000008);
          return this;
        }
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeFile file = 4;</code>
         */
        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile.Builder getFileBuilder() {
          // Requesting a builder implies the field will be modified, so mark
          // it present up front.
          bitField0_ |= 0x00000008;
          onChanged();
          return getFileFieldBuilder().getBuilder();
        }
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeFile file = 4;</code>
         */
        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFileOrBuilder getFileOrBuilder() {
          if (fileBuilder_ != null) {
            return fileBuilder_.getMessageOrBuilder();
          } else {
            return file_;
          }
        }
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeFile file = 4;</code>
         */
        private com.google.protobuf.SingleFieldBuilder<
            org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFileOrBuilder> 
            getFileFieldBuilder() {
          // Lazily switch from plain-field storage to builder-backed storage.
          if (fileBuilder_ == null) {
            fileBuilder_ = new com.google.protobuf.SingleFieldBuilder<
                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFileOrBuilder>(
                    file_,
                    getParentForChildren(),
                    isClean());
            file_ = null;
          }
          return fileBuilder_;
        }
9875    
        // optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory directory = 5;
        // Same single-field plumbing as 'file': directory_ is authoritative
        // until directoryBuilder_ is created, after which directory_ is nulled.
        private org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory directory_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory.getDefaultInstance();
        private com.google.protobuf.SingleFieldBuilder<
            org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectoryOrBuilder> directoryBuilder_;
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory directory = 5;</code>
         */
        public boolean hasDirectory() {
          return ((bitField0_ & 0x00000010) == 0x00000010);
        }
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory directory = 5;</code>
         */
        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory getDirectory() {
          if (directoryBuilder_ == null) {
            return directory_;
          } else {
            return directoryBuilder_.getMessage();
          }
        }
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory directory = 5;</code>
         */
        public Builder setDirectory(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory value) {
          if (directoryBuilder_ == null) {
            if (value == null) {
              throw new NullPointerException();
            }
            directory_ = value;
            onChanged();
          } else {
            directoryBuilder_.setMessage(value);
          }
          bitField0_ |= 0x00000010;
          return this;
        }
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory directory = 5;</code>
         */
        public Builder setDirectory(
            org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory.Builder builderForValue) {
          if (directoryBuilder_ == null) {
            directory_ = builderForValue.build();
            onChanged();
          } else {
            directoryBuilder_.setMessage(builderForValue.build());
          }
          bitField0_ |= 0x00000010;
          return this;
        }
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory directory = 5;</code>
         */
        public Builder mergeDirectory(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory value) {
          if (directoryBuilder_ == null) {
            // Merge into an existing non-default value, else adopt wholesale.
            if (((bitField0_ & 0x00000010) == 0x00000010) &&
                directory_ != org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory.getDefaultInstance()) {
              directory_ =
                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory.newBuilder(directory_).mergeFrom(value).buildPartial();
            } else {
              directory_ = value;
            }
            onChanged();
          } else {
            directoryBuilder_.mergeFrom(value);
          }
          bitField0_ |= 0x00000010;
          return this;
        }
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory directory = 5;</code>
         */
        public Builder clearDirectory() {
          if (directoryBuilder_ == null) {
            directory_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory.getDefaultInstance();
            onChanged();
          } else {
            directoryBuilder_.clear();
          }
          bitField0_ = (bitField0_ & ~0x00000010);
          return this;
        }
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory directory = 5;</code>
         */
        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory.Builder getDirectoryBuilder() {
          // Requesting a builder implies modification; mark present up front.
          bitField0_ |= 0x00000010;
          onChanged();
          return getDirectoryFieldBuilder().getBuilder();
        }
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory directory = 5;</code>
         */
        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectoryOrBuilder getDirectoryOrBuilder() {
          if (directoryBuilder_ != null) {
            return directoryBuilder_.getMessageOrBuilder();
          } else {
            return directory_;
          }
        }
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory directory = 5;</code>
         */
        private com.google.protobuf.SingleFieldBuilder<
            org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectoryOrBuilder> 
            getDirectoryFieldBuilder() {
          // Lazily switch from plain-field storage to builder-backed storage.
          if (directoryBuilder_ == null) {
            directoryBuilder_ = new com.google.protobuf.SingleFieldBuilder<
                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectoryOrBuilder>(
                    directory_,
                    getParentForChildren(),
                    isClean());
            directory_ = null;
          }
          return directoryBuilder_;
        }
9992    
        // optional .hadoop.hdfs.fsimage.INodeSection.INodeSymlink symlink = 6;
        // Same single-field plumbing as 'file': symlink_ is authoritative
        // until symlinkBuilder_ is created, after which symlink_ is nulled.
        private org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink symlink_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink.getDefaultInstance();
        private com.google.protobuf.SingleFieldBuilder<
            org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlinkOrBuilder> symlinkBuilder_;
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeSymlink symlink = 6;</code>
         */
        public boolean hasSymlink() {
          return ((bitField0_ & 0x00000020) == 0x00000020);
        }
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeSymlink symlink = 6;</code>
         */
        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink getSymlink() {
          if (symlinkBuilder_ == null) {
            return symlink_;
          } else {
            return symlinkBuilder_.getMessage();
          }
        }
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeSymlink symlink = 6;</code>
         */
        public Builder setSymlink(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink value) {
          if (symlinkBuilder_ == null) {
            if (value == null) {
              throw new NullPointerException();
            }
            symlink_ = value;
            onChanged();
          } else {
            symlinkBuilder_.setMessage(value);
          }
          bitField0_ |= 0x00000020;
          return this;
        }
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeSymlink symlink = 6;</code>
         */
        public Builder setSymlink(
            org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink.Builder builderForValue) {
          if (symlinkBuilder_ == null) {
            symlink_ = builderForValue.build();
            onChanged();
          } else {
            symlinkBuilder_.setMessage(builderForValue.build());
          }
          bitField0_ |= 0x00000020;
          return this;
        }
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeSymlink symlink = 6;</code>
         */
        public Builder mergeSymlink(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink value) {
          if (symlinkBuilder_ == null) {
            // Merge into an existing non-default value, else adopt wholesale.
            if (((bitField0_ & 0x00000020) == 0x00000020) &&
                symlink_ != org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink.getDefaultInstance()) {
              symlink_ =
                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink.newBuilder(symlink_).mergeFrom(value).buildPartial();
            } else {
              symlink_ = value;
            }
            onChanged();
          } else {
            symlinkBuilder_.mergeFrom(value);
          }
          bitField0_ |= 0x00000020;
          return this;
        }
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeSymlink symlink = 6;</code>
         */
        public Builder clearSymlink() {
          if (symlinkBuilder_ == null) {
            symlink_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink.getDefaultInstance();
            onChanged();
          } else {
            symlinkBuilder_.clear();
          }
          bitField0_ = (bitField0_ & ~0x00000020);
          return this;
        }
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeSymlink symlink = 6;</code>
         */
        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink.Builder getSymlinkBuilder() {
          // Requesting a builder implies modification; mark present up front.
          bitField0_ |= 0x00000020;
          onChanged();
          return getSymlinkFieldBuilder().getBuilder();
        }
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeSymlink symlink = 6;</code>
         */
        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlinkOrBuilder getSymlinkOrBuilder() {
          if (symlinkBuilder_ != null) {
            return symlinkBuilder_.getMessageOrBuilder();
          } else {
            return symlink_;
          }
        }
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeSymlink symlink = 6;</code>
         */
        private com.google.protobuf.SingleFieldBuilder<
            org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlinkOrBuilder> 
            getSymlinkFieldBuilder() {
          // Lazily switch from plain-field storage to builder-backed storage.
          if (symlinkBuilder_ == null) {
            symlinkBuilder_ = new com.google.protobuf.SingleFieldBuilder<
                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlinkOrBuilder>(
                    symlink_,
                    getParentForChildren(),
                    isClean());
            symlink_ = null;
          }
          return symlinkBuilder_;
        }
10109    
10110            // @@protoc_insertion_point(builder_scope:hadoop.hdfs.fsimage.INodeSection.INode)
10111          }
10112    
      // Eagerly construct the shared default (empty) INode instance and
      // normalize its fields to their proto defaults.
      static {
        defaultInstance = new INode(true);
        defaultInstance.initFields();
      }
10117    
10118          // @@protoc_insertion_point(class_scope:hadoop.hdfs.fsimage.INodeSection.INode)
10119        }
10120    
    // Presence bitmap for INodeSection's optional fields
    // (0x01 = lastInodeId, 0x02 = numInodes).
    private int bitField0_;
    // optional uint64 lastInodeId = 1;
    public static final int LASTINODEID_FIELD_NUMBER = 1;
    private long lastInodeId_;
    /**
     * <code>optional uint64 lastInodeId = 1;</code>
     */
    public boolean hasLastInodeId() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>optional uint64 lastInodeId = 1;</code>
     */
    public long getLastInodeId() {
      return lastInodeId_;
    }

    // optional uint64 numInodes = 2;
    public static final int NUMINODES_FIELD_NUMBER = 2;
    private long numInodes_;
    /**
     * <code>optional uint64 numInodes = 2;</code>
     *
     * <pre>
     * repeated INodes..
     * </pre>
     */
    public boolean hasNumInodes() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>optional uint64 numInodes = 2;</code>
     *
     * <pre>
     * repeated INodes..
     * </pre>
     */
    public long getNumInodes() {
      return numInodes_;
    }
10161    
    // Resets all fields to their proto defaults; invoked on the default instance.
    private void initFields() {
      lastInodeId_ = 0L;
      numInodes_ = 0L;
    }
    // Memoized isInitialized() result: -1 = not computed, 0 = false, 1 = true.
    private byte memoizedIsInitialized = -1;
10167        public final boolean isInitialized() {
10168          byte isInitialized = memoizedIsInitialized;
10169          if (isInitialized != -1) return isInitialized == 1;
10170    
10171          memoizedIsInitialized = 1;
10172          return true;
10173        }
10174    
10175        public void writeTo(com.google.protobuf.CodedOutputStream output)
10176                            throws java.io.IOException {
10177          getSerializedSize();
10178          if (((bitField0_ & 0x00000001) == 0x00000001)) {
10179            output.writeUInt64(1, lastInodeId_);
10180          }
10181          if (((bitField0_ & 0x00000002) == 0x00000002)) {
10182            output.writeUInt64(2, numInodes_);
10183          }
10184          getUnknownFields().writeTo(output);
10185        }
10186    
10187        private int memoizedSerializedSize = -1;
10188        public int getSerializedSize() {
10189          int size = memoizedSerializedSize;
10190          if (size != -1) return size;
10191    
10192          size = 0;
10193          if (((bitField0_ & 0x00000001) == 0x00000001)) {
10194            size += com.google.protobuf.CodedOutputStream
10195              .computeUInt64Size(1, lastInodeId_);
10196          }
10197          if (((bitField0_ & 0x00000002) == 0x00000002)) {
10198            size += com.google.protobuf.CodedOutputStream
10199              .computeUInt64Size(2, numInodes_);
10200          }
10201          size += getUnknownFields().getSerializedSize();
10202          memoizedSerializedSize = size;
10203          return size;
10204        }
10205    
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      // Java serialization delegates to GeneratedMessage's serialized proxy.
      return super.writeReplace();
    }
10212    
    // Static parse entry points.  All delegate to the generated PARSER and
    // differ only in the input source (ByteString, byte[], InputStream,
    // CodedInputStream, delimited stream) and whether an extension registry
    // is supplied.
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
10265    
    // Builder factory methods: fresh builder, builder pre-populated from a
    // prototype, and the internal parent-aware variant used during parsing.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
10279        /**
10280         * Protobuf type {@code hadoop.hdfs.fsimage.INodeSection}
10281         *
10282         * <pre>
10283         **
10284         * Permission is serialized as a 64-bit long. [0:24):[25:48):[48:64) (in Big Endian).
10285         * The first and the second parts are the string ids of the user and
10286         * group name, and the last 16 bits are the permission bits.
10287         *
10288         * Name: INODE
10289         * </pre>
10290         */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSectionOrBuilder {
      // NOTE(review): protoc-generated builder -- regenerate from fsimage.proto
      // rather than hand-editing. Presence bits in bitField0_:
      // 0x1 = lastInodeId, 0x2 = numInodes.
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.Builder.class);
      }

      // Construct using org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        // No message-typed fields, so there are no nested field builders to init.
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        // Reset both uint64 fields to their default (0) and drop presence bits.
        super.clear();
        lastInodeId_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000001);
        numInodes_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000002);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_descriptor;
      }

      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection getDefaultInstanceForType() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.getDefaultInstance();
      }

      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection build() {
        org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection buildPartial() {
        org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection result = new org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        // Copy presence bits; field values are copied unconditionally (unset
        // fields simply carry their 0L defaults).
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.lastInodeId_ = lastInodeId_;
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.numInodes_ = numInodes_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection) {
          return mergeFrom((org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection)other);
        } else {
          // Foreign message type: fall back to the superclass's reflective merge.
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection other) {
        if (other == org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.getDefaultInstance()) return this;
        // Only fields marked present in `other` overwrite this builder's values.
        if (other.hasLastInodeId()) {
          setLastInodeId(other.getLastInodeId());
        }
        if (other.hasNumInodes()) {
          setNumInodes(other.getNumInodes());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        // Both fields are optional, so every builder state is valid.
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          // Capture the partially parsed message so it is still merged below.
          parsedMessage = (org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;

      // optional uint64 lastInodeId = 1;
      private long lastInodeId_ ;
      /**
       * <code>optional uint64 lastInodeId = 1;</code>
       */
      public boolean hasLastInodeId() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>optional uint64 lastInodeId = 1;</code>
       */
      public long getLastInodeId() {
        return lastInodeId_;
      }
      /**
       * <code>optional uint64 lastInodeId = 1;</code>
       */
      public Builder setLastInodeId(long value) {
        bitField0_ |= 0x00000001;
        lastInodeId_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional uint64 lastInodeId = 1;</code>
       */
      public Builder clearLastInodeId() {
        bitField0_ = (bitField0_ & ~0x00000001);
        lastInodeId_ = 0L;
        onChanged();
        return this;
      }

      // optional uint64 numInodes = 2;
      private long numInodes_ ;
      /**
       * <code>optional uint64 numInodes = 2;</code>
       *
       * <pre>
       * repeated INodes..
       * </pre>
       */
      public boolean hasNumInodes() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>optional uint64 numInodes = 2;</code>
       *
       * <pre>
       * repeated INodes..
       * </pre>
       */
      public long getNumInodes() {
        return numInodes_;
      }
      /**
       * <code>optional uint64 numInodes = 2;</code>
       *
       * <pre>
       * repeated INodes..
       * </pre>
       */
      public Builder setNumInodes(long value) {
        bitField0_ |= 0x00000002;
        numInodes_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional uint64 numInodes = 2;</code>
       *
       * <pre>
       * repeated INodes..
       * </pre>
       */
      public Builder clearNumInodes() {
        bitField0_ = (bitField0_ & ~0x00000002);
        numInodes_ = 0L;
        onChanged();
        return this;
      }

      // @@protoc_insertion_point(builder_scope:hadoop.hdfs.fsimage.INodeSection)
    }
10499    
    static {
      // Eagerly build the shared immutable default instance returned by
      // getDefaultInstance(); the no-init ctor skips normal construction.
      defaultInstance = new INodeSection(true);
      defaultInstance.initFields();
    }
10504    
10505        // @@protoc_insertion_point(class_scope:hadoop.hdfs.fsimage.INodeSection)
10506      }
10507    
  /**
   * Read-only accessor contract for {@code FilesUnderConstructionSection}.
   * The message declares no fields of its own, so this interface adds nothing
   * beyond {@link com.google.protobuf.MessageOrBuilder}.
   */
  public interface FilesUnderConstructionSectionOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
  }
10511      /**
10512       * Protobuf type {@code hadoop.hdfs.fsimage.FilesUnderConstructionSection}
10513       *
10514       * <pre>
10515       **
10516       * This section records information about under-construction files for
10517       * reconstructing the lease map.
10518       * NAME: FILES_UNDERCONSTRUCTION
10519       * </pre>
10520       */
10521      public static final class FilesUnderConstructionSection extends
10522          com.google.protobuf.GeneratedMessage
10523          implements FilesUnderConstructionSectionOrBuilder {
    // Use FilesUnderConstructionSection.newBuilder() to construct.
    private FilesUnderConstructionSection(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // No-init ctor: only sets unknownFields; used to build the shared default instance.
    private FilesUnderConstructionSection(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
10530    
    private static final FilesUnderConstructionSection defaultInstance;
    /** Shared immutable instance with all fields at their defaults. */
    public static FilesUnderConstructionSection getDefaultInstance() {
      return defaultInstance;
    }

    public FilesUnderConstructionSection getDefaultInstanceForType() {
      return defaultInstance;
    }

    // Fields seen on the wire that this schema version does not know about.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    /**
     * Wire-format parsing constructor, invoked via {@link #PARSER}. This message
     * declares no fields, so every non-zero tag is preserved in
     * {@code unknownFields} (forward compatibility) until end of input (tag 0).
     */
    private FilesUnderConstructionSection(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              // End of stream / enclosing message.
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Always capture whatever was read, even when parsing failed.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_FilesUnderConstructionSection_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_FilesUnderConstructionSection_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.Builder.class);
    }

    // NOTE(review): public, non-final mutable field is protobuf 2.5 codegen style.
    // Delegates parsing to the wire-format constructor above.
    public static com.google.protobuf.Parser<FilesUnderConstructionSection> PARSER =
        new com.google.protobuf.AbstractParser<FilesUnderConstructionSection>() {
      public FilesUnderConstructionSection parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new FilesUnderConstructionSection(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<FilesUnderConstructionSection> getParserForType() {
      return PARSER;
    }
10606    
    /**
     * Read-only accessor contract shared by {@code FileUnderConstructionEntry}
     * and its Builder: presence checks and getters for the two optional fields.
     */
    public interface FileUnderConstructionEntryOrBuilder
        extends com.google.protobuf.MessageOrBuilder {

      // optional uint64 inodeId = 1;
      /**
       * <code>optional uint64 inodeId = 1;</code>
       */
      boolean hasInodeId();
      /**
       * <code>optional uint64 inodeId = 1;</code>
       */
      long getInodeId();

      // optional string fullPath = 2;
      /**
       * <code>optional string fullPath = 2;</code>
       */
      boolean hasFullPath();
      /**
       * <code>optional string fullPath = 2;</code>
       */
      java.lang.String getFullPath();
      /**
       * <code>optional string fullPath = 2;</code>
       */
      com.google.protobuf.ByteString
          getFullPathBytes();
    }
10635        /**
10636         * Protobuf type {@code hadoop.hdfs.fsimage.FilesUnderConstructionSection.FileUnderConstructionEntry}
10637         */
10638        public static final class FileUnderConstructionEntry extends
10639            com.google.protobuf.GeneratedMessage
10640            implements FileUnderConstructionEntryOrBuilder {
      // Use FileUnderConstructionEntry.newBuilder() to construct.
      private FileUnderConstructionEntry(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
        super(builder);
        this.unknownFields = builder.getUnknownFields();
      }
      // No-init ctor: only sets unknownFields; used to build the shared default instance.
      private FileUnderConstructionEntry(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

      private static final FileUnderConstructionEntry defaultInstance;
      /** Shared immutable instance with all fields at their defaults. */
      public static FileUnderConstructionEntry getDefaultInstance() {
        return defaultInstance;
      }

      public FileUnderConstructionEntry getDefaultInstanceForType() {
        return defaultInstance;
      }

      // Fields seen on the wire that this schema version does not know about.
      private final com.google.protobuf.UnknownFieldSet unknownFields;
      @java.lang.Override
      public final com.google.protobuf.UnknownFieldSet
          getUnknownFields() {
        return this.unknownFields;
      }
      /**
       * Wire-format parsing constructor, invoked via {@link #PARSER}.
       * Tag 8 = field 1 (inodeId, varint); tag 18 = field 2 (fullPath,
       * length-delimited). Note the {@code default:} arm is emitted before the
       * field cases; Java selects {@code default} only when no other label
       * matches, so the ordering has no behavioral effect.
       */
      private FileUnderConstructionEntry(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        initFields();
        // Declared by the code generator; assigned but never read here
        // (this message has no repeated fields).
        int mutable_bitField0_ = 0;
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
            com.google.protobuf.UnknownFieldSet.newBuilder();
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                // End of stream / enclosing message.
                done = true;
                break;
              default: {
                if (!parseUnknownField(input, unknownFields,
                                       extensionRegistry, tag)) {
                  done = true;
                }
                break;
              }
              case 8: {
                bitField0_ |= 0x00000001;
                inodeId_ = input.readUInt64();
                break;
              }
              case 18: {
                bitField0_ |= 0x00000002;
                // Kept as a ByteString; decoded lazily by getFullPath().
                fullPath_ = input.readBytes();
                break;
              }
            }
          }
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(this);
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(
              e.getMessage()).setUnfinishedMessage(this);
        } finally {
          // Always capture whatever was read, even when parsing failed.
          this.unknownFields = unknownFields.build();
          makeExtensionsImmutable();
        }
      }
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_FilesUnderConstructionSection_FileUnderConstructionEntry_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_FilesUnderConstructionSection_FileUnderConstructionEntry_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.FileUnderConstructionEntry.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.FileUnderConstructionEntry.Builder.class);
      }

      // NOTE(review): public, non-final mutable field is protobuf 2.5 codegen style.
      // Delegates parsing to the wire-format constructor above.
      public static com.google.protobuf.Parser<FileUnderConstructionEntry> PARSER =
          new com.google.protobuf.AbstractParser<FileUnderConstructionEntry>() {
        public FileUnderConstructionEntry parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          return new FileUnderConstructionEntry(input, extensionRegistry);
        }
      };

      @java.lang.Override
      public com.google.protobuf.Parser<FileUnderConstructionEntry> getParserForType() {
        return PARSER;
      }
10734    
      private int bitField0_;  // presence bits: 0x1 = inodeId, 0x2 = fullPath
      // optional uint64 inodeId = 1;
      public static final int INODEID_FIELD_NUMBER = 1;
      private long inodeId_;
      /**
       * <code>optional uint64 inodeId = 1;</code>
       */
      public boolean hasInodeId() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>optional uint64 inodeId = 1;</code>
       */
      public long getInodeId() {
        return inodeId_;
      }

      // optional string fullPath = 2;
      public static final int FULLPATH_FIELD_NUMBER = 2;
      // Holds either a java.lang.String or a ByteString (lazy UTF-8 decode).
      private java.lang.Object fullPath_;
      /**
       * <code>optional string fullPath = 2;</code>
       */
      public boolean hasFullPath() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>optional string fullPath = 2;</code>
       */
      public java.lang.String getFullPath() {
        java.lang.Object ref = fullPath_;
        if (ref instanceof java.lang.String) {
          return (java.lang.String) ref;
        } else {
          // Parsed form is a ByteString; decode on first access and cache the
          // String only when the bytes are valid UTF-8.
          com.google.protobuf.ByteString bs = 
              (com.google.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            fullPath_ = s;
          }
          return s;
        }
      }
      /**
       * <code>optional string fullPath = 2;</code>
       */
      public com.google.protobuf.ByteString
          getFullPathBytes() {
        java.lang.Object ref = fullPath_;
        if (ref instanceof java.lang.String) {
          // Cache the encoded form so repeated serializations reuse it.
          com.google.protobuf.ByteString b = 
              com.google.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          fullPath_ = b;
          return b;
        } else {
          return (com.google.protobuf.ByteString) ref;
        }
      }
10794    
      private void initFields() {
        // Field defaults: uint64 -> 0, string -> "".
        inodeId_ = 0L;
        fullPath_ = "";
      }
      private byte memoizedIsInitialized = -1;  // -1 unknown, 0 false, 1 true
      public final boolean isInitialized() {
        byte isInitialized = memoizedIsInitialized;
        if (isInitialized != -1) return isInitialized == 1;

        // No required fields, so always initialized.
        memoizedIsInitialized = 1;
        return true;
      }

      public void writeTo(com.google.protobuf.CodedOutputStream output)
                          throws java.io.IOException {
        // Writes only fields whose presence bit is set, then any unknown fields.
        getSerializedSize();
        if (((bitField0_ & 0x00000001) == 0x00000001)) {
          output.writeUInt64(1, inodeId_);
        }
        if (((bitField0_ & 0x00000002) == 0x00000002)) {
          output.writeBytes(2, getFullPathBytes());
        }
        getUnknownFields().writeTo(output);
      }

      private int memoizedSerializedSize = -1;  // -1 = not yet computed
      public int getSerializedSize() {
        int size = memoizedSerializedSize;
        if (size != -1) return size;

        size = 0;
        if (((bitField0_ & 0x00000001) == 0x00000001)) {
          size += com.google.protobuf.CodedOutputStream
            .computeUInt64Size(1, inodeId_);
        }
        if (((bitField0_ & 0x00000002) == 0x00000002)) {
          size += com.google.protobuf.CodedOutputStream
            .computeBytesSize(2, getFullPathBytes());
        }
        size += getUnknownFields().getSerializedSize();
        memoizedSerializedSize = size;
        return size;
      }

      private static final long serialVersionUID = 0L;
      @java.lang.Override
      protected java.lang.Object writeReplace()
          throws java.io.ObjectStreamException {
        // Java serialization is delegated to the GeneratedMessage superclass.
        return super.writeReplace();
      }
10845    
      // Static parsing entry points; all delegate to PARSER. The "Delimited"
      // overloads read a varint length prefix before the message bytes.
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.FileUnderConstructionEntry parseFrom(
          com.google.protobuf.ByteString data)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.FileUnderConstructionEntry parseFrom(
          com.google.protobuf.ByteString data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.FileUnderConstructionEntry parseFrom(byte[] data)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.FileUnderConstructionEntry parseFrom(
          byte[] data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.FileUnderConstructionEntry parseFrom(java.io.InputStream input)
          throws java.io.IOException {
        return PARSER.parseFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.FileUnderConstructionEntry parseFrom(
          java.io.InputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseFrom(input, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.FileUnderConstructionEntry parseDelimitedFrom(java.io.InputStream input)
          throws java.io.IOException {
        return PARSER.parseDelimitedFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.FileUnderConstructionEntry parseDelimitedFrom(
          java.io.InputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseDelimitedFrom(input, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.FileUnderConstructionEntry parseFrom(
          com.google.protobuf.CodedInputStream input)
          throws java.io.IOException {
        return PARSER.parseFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.FileUnderConstructionEntry parseFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseFrom(input, extensionRegistry);
      }
10898    
      /** Returns a fresh, empty {@link Builder}. */
      public static Builder newBuilder() { return Builder.create(); }
      public Builder newBuilderForType() { return newBuilder(); }
      /** Returns a new builder pre-populated with {@code prototype}'s set fields. */
      public static Builder newBuilder(org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.FileUnderConstructionEntry prototype) {
        return newBuilder().mergeFrom(prototype);
      }
      public Builder toBuilder() { return newBuilder(this); }

      @java.lang.Override
      protected Builder newBuilderForType(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        // Parent-aware builder used internally by the nested-builder plumbing.
        Builder builder = new Builder(parent);
        return builder;
      }
10912          /**
10913           * Protobuf type {@code hadoop.hdfs.fsimage.FilesUnderConstructionSection.FileUnderConstructionEntry}
10914           */
10915          public static final class Builder extends
10916              com.google.protobuf.GeneratedMessage.Builder<Builder>
10917             implements org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.FileUnderConstructionEntryOrBuilder {
          public static final com.google.protobuf.Descriptors.Descriptor
              getDescriptor() {
            return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_FilesUnderConstructionSection_FileUnderConstructionEntry_descriptor;
          }

          protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
              internalGetFieldAccessorTable() {
            return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_FilesUnderConstructionSection_FileUnderConstructionEntry_fieldAccessorTable
                .ensureFieldAccessorsInitialized(
                    org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.FileUnderConstructionEntry.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.FileUnderConstructionEntry.Builder.class);
          }

          // Construct using org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.FileUnderConstructionEntry.newBuilder()
          private Builder() {
            maybeForceBuilderInitialization();
          }

          private Builder(
              com.google.protobuf.GeneratedMessage.BuilderParent parent) {
            super(parent);
            maybeForceBuilderInitialization();
          }
          private void maybeForceBuilderInitialization() {
            // No message-typed fields, so there are no nested field builders to init.
            if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
            }
          }
          private static Builder create() {
            return new Builder();
          }

          public Builder clear() {
            // Reset both fields to their defaults (0L / "") and drop presence
            // bits (0x1 = inodeId, 0x2 = fullPath).
            super.clear();
            inodeId_ = 0L;
            bitField0_ = (bitField0_ & ~0x00000001);
            fullPath_ = "";
            bitField0_ = (bitField0_ & ~0x00000002);
            return this;
          }

          public Builder clone() {
            return create().mergeFrom(buildPartial());
          }

          public com.google.protobuf.Descriptors.Descriptor
              getDescriptorForType() {
            return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_FilesUnderConstructionSection_FileUnderConstructionEntry_descriptor;
          }

          public org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.FileUnderConstructionEntry getDefaultInstanceForType() {
            return org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.FileUnderConstructionEntry.getDefaultInstance();
          }
10969    
10970            public org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.FileUnderConstructionEntry build() {
10971              org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.FileUnderConstructionEntry result = buildPartial();
10972              if (!result.isInitialized()) {
10973                throw newUninitializedMessageException(result);
10974              }
10975              return result;
10976            }
10977    
10978            public org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.FileUnderConstructionEntry buildPartial() {
10979              org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.FileUnderConstructionEntry result = new org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.FileUnderConstructionEntry(this);
10980              int from_bitField0_ = bitField0_;
10981              int to_bitField0_ = 0;
10982              if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
10983                to_bitField0_ |= 0x00000001;
10984              }
10985              result.inodeId_ = inodeId_;
10986              if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
10987                to_bitField0_ |= 0x00000002;
10988              }
10989              result.fullPath_ = fullPath_;
10990              result.bitField0_ = to_bitField0_;
10991              onBuilt();
10992              return result;
10993            }
10994    
10995            public Builder mergeFrom(com.google.protobuf.Message other) {
10996              if (other instanceof org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.FileUnderConstructionEntry) {
10997                return mergeFrom((org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.FileUnderConstructionEntry)other);
10998              } else {
10999                super.mergeFrom(other);
11000                return this;
11001              }
11002            }
11003    
11004            public Builder mergeFrom(org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.FileUnderConstructionEntry other) {
11005              if (other == org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.FileUnderConstructionEntry.getDefaultInstance()) return this;
11006              if (other.hasInodeId()) {
11007                setInodeId(other.getInodeId());
11008              }
11009              if (other.hasFullPath()) {
11010                bitField0_ |= 0x00000002;
11011                fullPath_ = other.fullPath_;
11012                onChanged();
11013              }
11014              this.mergeUnknownFields(other.getUnknownFields());
11015              return this;
11016            }
11017    
11018            public final boolean isInitialized() {
11019              return true;
11020            }
11021    
11022            public Builder mergeFrom(
11023                com.google.protobuf.CodedInputStream input,
11024                com.google.protobuf.ExtensionRegistryLite extensionRegistry)
11025                throws java.io.IOException {
11026              org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.FileUnderConstructionEntry parsedMessage = null;
11027              try {
11028                parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
11029              } catch (com.google.protobuf.InvalidProtocolBufferException e) {
11030                parsedMessage = (org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.FileUnderConstructionEntry) e.getUnfinishedMessage();
11031                throw e;
11032              } finally {
11033                if (parsedMessage != null) {
11034                  mergeFrom(parsedMessage);
11035                }
11036              }
11037              return this;
11038            }
11039            private int bitField0_;
11040    
11041            // optional uint64 inodeId = 1;
11042            private long inodeId_ ;
11043            /**
11044             * <code>optional uint64 inodeId = 1;</code>
11045             */
11046            public boolean hasInodeId() {
11047              return ((bitField0_ & 0x00000001) == 0x00000001);
11048            }
11049            /**
11050             * <code>optional uint64 inodeId = 1;</code>
11051             */
11052            public long getInodeId() {
11053              return inodeId_;
11054            }
11055            /**
11056             * <code>optional uint64 inodeId = 1;</code>
11057             */
11058            public Builder setInodeId(long value) {
11059              bitField0_ |= 0x00000001;
11060              inodeId_ = value;
11061              onChanged();
11062              return this;
11063            }
11064            /**
11065             * <code>optional uint64 inodeId = 1;</code>
11066             */
11067            public Builder clearInodeId() {
11068              bitField0_ = (bitField0_ & ~0x00000001);
11069              inodeId_ = 0L;
11070              onChanged();
11071              return this;
11072            }
11073    
11074            // optional string fullPath = 2;
11075            private java.lang.Object fullPath_ = "";
11076            /**
11077             * <code>optional string fullPath = 2;</code>
11078             */
11079            public boolean hasFullPath() {
11080              return ((bitField0_ & 0x00000002) == 0x00000002);
11081            }
11082            /**
11083             * <code>optional string fullPath = 2;</code>
11084             */
11085            public java.lang.String getFullPath() {
11086              java.lang.Object ref = fullPath_;
11087              if (!(ref instanceof java.lang.String)) {
11088                java.lang.String s = ((com.google.protobuf.ByteString) ref)
11089                    .toStringUtf8();
11090                fullPath_ = s;
11091                return s;
11092              } else {
11093                return (java.lang.String) ref;
11094              }
11095            }
11096            /**
11097             * <code>optional string fullPath = 2;</code>
11098             */
11099            public com.google.protobuf.ByteString
11100                getFullPathBytes() {
11101              java.lang.Object ref = fullPath_;
11102              if (ref instanceof String) {
11103                com.google.protobuf.ByteString b = 
11104                    com.google.protobuf.ByteString.copyFromUtf8(
11105                        (java.lang.String) ref);
11106                fullPath_ = b;
11107                return b;
11108              } else {
11109                return (com.google.protobuf.ByteString) ref;
11110              }
11111            }
11112            /**
11113             * <code>optional string fullPath = 2;</code>
11114             */
11115            public Builder setFullPath(
11116                java.lang.String value) {
11117              if (value == null) {
11118        throw new NullPointerException();
11119      }
11120      bitField0_ |= 0x00000002;
11121              fullPath_ = value;
11122              onChanged();
11123              return this;
11124            }
11125            /**
11126             * <code>optional string fullPath = 2;</code>
11127             */
11128            public Builder clearFullPath() {
11129              bitField0_ = (bitField0_ & ~0x00000002);
11130              fullPath_ = getDefaultInstance().getFullPath();
11131              onChanged();
11132              return this;
11133            }
11134            /**
11135             * <code>optional string fullPath = 2;</code>
11136             */
11137            public Builder setFullPathBytes(
11138                com.google.protobuf.ByteString value) {
11139              if (value == null) {
11140        throw new NullPointerException();
11141      }
11142      bitField0_ |= 0x00000002;
11143              fullPath_ = value;
11144              onChanged();
11145              return this;
11146            }
11147    
11148            // @@protoc_insertion_point(builder_scope:hadoop.hdfs.fsimage.FilesUnderConstructionSection.FileUnderConstructionEntry)
11149          }
11150    
      static {
        // Eagerly create the shared default (all-fields-unset) instance.
        defaultInstance = new FileUnderConstructionEntry(true);
        defaultInstance.initFields();
      }
11155    
11156          // @@protoc_insertion_point(class_scope:hadoop.hdfs.fsimage.FilesUnderConstructionSection.FileUnderConstructionEntry)
11157        }
11158    
    // No singular fields to initialize for this message type.
    private void initFields() {
    }
    // Cached result of isInitialized(): -1 = not yet computed, 1 = initialized.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      // No required fields declared, so the message is always initialized.
      memoizedIsInitialized = 1;
      return true;
    }

    // Serializes this message; only unknown fields can carry data here.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      getUnknownFields().writeTo(output);
    }

    // Cached serialized size: -1 until first computed.
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
11193    
    // ----- Static parsing entry points; all delegate to PARSER. -----
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    // Delimited variants read a length-prefixed message from the stream.
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
11246    
    // Builder factory methods.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    // Creates a builder pre-populated from an existing message.
    public static Builder newBuilder(org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
11253    
11254        @java.lang.Override
11255        protected Builder newBuilderForType(
11256            com.google.protobuf.GeneratedMessage.BuilderParent parent) {
11257          Builder builder = new Builder(parent);
11258          return builder;
11259        }
11260        /**
11261         * Protobuf type {@code hadoop.hdfs.fsimage.FilesUnderConstructionSection}
11262         *
11263         * <pre>
11264         **
11265         * This section records information about under-construction files for
11266         * reconstructing the lease map.
11267         * NAME: FILES_UNDERCONSTRUCTION
11268         * </pre>
11269         */
11270        public static final class Builder extends
11271            com.google.protobuf.GeneratedMessage.Builder<Builder>
11272           implements org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSectionOrBuilder {
11273          public static final com.google.protobuf.Descriptors.Descriptor
11274              getDescriptor() {
11275            return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_FilesUnderConstructionSection_descriptor;
11276          }
11277    
11278          protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
11279              internalGetFieldAccessorTable() {
11280            return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_FilesUnderConstructionSection_fieldAccessorTable
11281                .ensureFieldAccessorsInitialized(
11282                    org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.Builder.class);
11283          }
11284    
11285          // Construct using org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.newBuilder()
11286          private Builder() {
11287            maybeForceBuilderInitialization();
11288          }
11289    
11290          private Builder(
11291              com.google.protobuf.GeneratedMessage.BuilderParent parent) {
11292            super(parent);
11293            maybeForceBuilderInitialization();
11294          }
11295          private void maybeForceBuilderInitialization() {
11296            if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
11297            }
11298          }
11299          private static Builder create() {
11300            return new Builder();
11301          }
11302    
11303          public Builder clear() {
11304            super.clear();
11305            return this;
11306          }
11307    
11308          public Builder clone() {
11309            return create().mergeFrom(buildPartial());
11310          }
11311    
11312          public com.google.protobuf.Descriptors.Descriptor
11313              getDescriptorForType() {
11314            return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_FilesUnderConstructionSection_descriptor;
11315          }
11316    
11317          public org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection getDefaultInstanceForType() {
11318            return org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.getDefaultInstance();
11319          }
11320    
11321          public org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection build() {
11322            org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection result = buildPartial();
11323            if (!result.isInitialized()) {
11324              throw newUninitializedMessageException(result);
11325            }
11326            return result;
11327          }
11328    
11329          public org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection buildPartial() {
11330            org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection result = new org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection(this);
11331            onBuilt();
11332            return result;
11333          }
11334    
11335          public Builder mergeFrom(com.google.protobuf.Message other) {
11336            if (other instanceof org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection) {
11337              return mergeFrom((org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection)other);
11338            } else {
11339              super.mergeFrom(other);
11340              return this;
11341            }
11342          }
11343    
11344          public Builder mergeFrom(org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection other) {
11345            if (other == org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.getDefaultInstance()) return this;
11346            this.mergeUnknownFields(other.getUnknownFields());
11347            return this;
11348          }
11349    
11350          public final boolean isInitialized() {
11351            return true;
11352          }
11353    
11354          public Builder mergeFrom(
11355              com.google.protobuf.CodedInputStream input,
11356              com.google.protobuf.ExtensionRegistryLite extensionRegistry)
11357              throws java.io.IOException {
11358            org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection parsedMessage = null;
11359            try {
11360              parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
11361            } catch (com.google.protobuf.InvalidProtocolBufferException e) {
11362              parsedMessage = (org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection) e.getUnfinishedMessage();
11363              throw e;
11364            } finally {
11365              if (parsedMessage != null) {
11366                mergeFrom(parsedMessage);
11367              }
11368            }
11369            return this;
11370          }
11371    
11372          // @@protoc_insertion_point(builder_scope:hadoop.hdfs.fsimage.FilesUnderConstructionSection)
11373        }
11374    
    static {
      // Eagerly create the shared default (all-fields-unset) instance.
      defaultInstance = new FilesUnderConstructionSection(true);
      defaultInstance.initFields();
    }
11379    
11380        // @@protoc_insertion_point(class_scope:hadoop.hdfs.fsimage.FilesUnderConstructionSection)
11381      }
11382    
  // Accessor interface for INodeDirectorySection; the message declares no
  // fields, so only the inherited MessageOrBuilder operations apply.
  public interface INodeDirectorySectionOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
  }
11386      /**
11387       * Protobuf type {@code hadoop.hdfs.fsimage.INodeDirectorySection}
11388       *
11389       * <pre>
11390       **
11391       * This section records the children of each directories
11392       * NAME: INODE_DIR
11393       * </pre>
11394       */
11395      public static final class INodeDirectorySection extends
11396          com.google.protobuf.GeneratedMessage
11397          implements INodeDirectorySectionOrBuilder {
    // Use INodeDirectorySection.newBuilder() to construct.
    private INodeDirectorySection(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Constructor for the default-instance singleton; skips field initialization.
    private INodeDirectorySection(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    // Shared immutable default instance, created in the static initializer.
    private static final INodeDirectorySection defaultInstance;
    public static INodeDirectorySection getDefaultInstance() {
      return defaultInstance;
    }

    public INodeDirectorySection getDefaultInstanceForType() {
      return defaultInstance;
    }

    // Fields read from the wire that are not defined in the schema.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor. The message declares no fields, so every
    // non-zero tag is routed to parseUnknownField() and kept in unknownFields.
    private INodeDirectorySection(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              // Tag 0 marks end of input.
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Attach whatever was read before any failure, then freeze extensions.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    // Wire/reflection descriptor for INodeDirectorySection.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeDirectorySection_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeDirectorySection_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.Builder.class);
    }

    // Stream parser; delegates to the parsing constructor above.
    public static com.google.protobuf.Parser<INodeDirectorySection> PARSER =
        new com.google.protobuf.AbstractParser<INodeDirectorySection>() {
      public INodeDirectorySection parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new INodeDirectorySection(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<INodeDirectorySection> getParserForType() {
      return PARSER;
    }
11480    
    // Accessor interface for DirEntry: a parent inode id plus two packed
    // child lists (regular children and reference-node children).
    public interface DirEntryOrBuilder
        extends com.google.protobuf.MessageOrBuilder {

      // optional uint64 parent = 1;
      /**
       * <code>optional uint64 parent = 1;</code>
       */
      boolean hasParent();
      /**
       * <code>optional uint64 parent = 1;</code>
       */
      long getParent();

      // repeated uint64 children = 2 [packed = true];
      /**
       * <code>repeated uint64 children = 2 [packed = true];</code>
       *
       * <pre>
       * children that are not reference nodes
       * </pre>
       */
      java.util.List<java.lang.Long> getChildrenList();
      /**
       * <code>repeated uint64 children = 2 [packed = true];</code>
       *
       * <pre>
       * children that are not reference nodes
       * </pre>
       */
      int getChildrenCount();
      /**
       * <code>repeated uint64 children = 2 [packed = true];</code>
       *
       * <pre>
       * children that are not reference nodes
       * </pre>
       */
      long getChildren(int index);

      // repeated uint32 refChildren = 3 [packed = true];
      /**
       * <code>repeated uint32 refChildren = 3 [packed = true];</code>
       *
       * <pre>
       * children that are reference nodes, each element is a reference node id
       * </pre>
       */
      java.util.List<java.lang.Integer> getRefChildrenList();
      /**
       * <code>repeated uint32 refChildren = 3 [packed = true];</code>
       *
       * <pre>
       * children that are reference nodes, each element is a reference node id
       * </pre>
       */
      int getRefChildrenCount();
      /**
       * <code>repeated uint32 refChildren = 3 [packed = true];</code>
       *
       * <pre>
       * children that are reference nodes, each element is a reference node id
       * </pre>
       */
      int getRefChildren(int index);
    }
11546        /**
11547         * Protobuf type {@code hadoop.hdfs.fsimage.INodeDirectorySection.DirEntry}
11548         *
11549         * <pre>
11550         **
11551         * A single DirEntry needs to fit in the default PB max message size of
11552         * 64MB. Please be careful when adding more fields to a DirEntry!
11553         * </pre>
11554         */
11555        public static final class DirEntry extends
11556            com.google.protobuf.GeneratedMessage
11557            implements DirEntryOrBuilder {
      // Use DirEntry.newBuilder() to construct.
      private DirEntry(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
        super(builder);
        this.unknownFields = builder.getUnknownFields();
      }
      // Constructor for the default-instance singleton; skips field initialization.
      private DirEntry(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

      // Shared immutable default instance, created in the static initializer.
      private static final DirEntry defaultInstance;
      public static DirEntry getDefaultInstance() {
        return defaultInstance;
      }

      public DirEntry getDefaultInstanceForType() {
        return defaultInstance;
      }

      // Fields read from the wire that are not defined in the schema.
      private final com.google.protobuf.UnknownFieldSet unknownFields;
      @java.lang.Override
      public final com.google.protobuf.UnknownFieldSet
          getUnknownFields() {
        return this.unknownFields;
      }
      /**
       * Parses a serialized DirEntry from {@code input}. Invoked only via
       * {@link #PARSER}; all parse failures surface as
       * {@link com.google.protobuf.InvalidProtocolBufferException}.
       */
      private DirEntry(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        initFields();
        int mutable_bitField0_ = 0;
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
            com.google.protobuf.UnknownFieldSet.newBuilder();
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                // Tag 0 marks end of stream.
                done = true;
                break;
              default: {
                // Unrecognized tag: preserve it in unknownFields, or stop if it
                // cannot be consumed (e.g. an end-group tag).
                if (!parseUnknownField(input, unknownFields,
                                       extensionRegistry, tag)) {
                  done = true;
                }
                break;
              }
              case 8: {
                // Field 1 (parent), wire type 0 (varint).
                bitField0_ |= 0x00000001;
                parent_ = input.readUInt64();
                break;
              }
              case 16: {
                // Field 2 (children) in unpacked form: one varint per element.
                if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
                  children_ = new java.util.ArrayList<java.lang.Long>();
                  mutable_bitField0_ |= 0x00000002;
                }
                children_.add(input.readUInt64());
                break;
              }
              case 18: {
                // Field 2 (children) in packed form: a length-delimited run of
                // varints, bounded by a temporary read limit.
                int length = input.readRawVarint32();
                int limit = input.pushLimit(length);
                if (!((mutable_bitField0_ & 0x00000002) == 0x00000002) && input.getBytesUntilLimit() > 0) {
                  children_ = new java.util.ArrayList<java.lang.Long>();
                  mutable_bitField0_ |= 0x00000002;
                }
                while (input.getBytesUntilLimit() > 0) {
                  children_.add(input.readUInt64());
                }
                input.popLimit(limit);
                break;
              }
              case 24: {
                // Field 3 (refChildren) in unpacked form.
                if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) {
                  refChildren_ = new java.util.ArrayList<java.lang.Integer>();
                  mutable_bitField0_ |= 0x00000004;
                }
                refChildren_.add(input.readUInt32());
                break;
              }
              case 26: {
                // Field 3 (refChildren) in packed form.
                int length = input.readRawVarint32();
                int limit = input.pushLimit(length);
                if (!((mutable_bitField0_ & 0x00000004) == 0x00000004) && input.getBytesUntilLimit() > 0) {
                  refChildren_ = new java.util.ArrayList<java.lang.Integer>();
                  mutable_bitField0_ |= 0x00000004;
                }
                while (input.getBytesUntilLimit() > 0) {
                  refChildren_.add(input.readUInt32());
                }
                input.popLimit(limit);
                break;
              }
            }
          }
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(this);
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(
              e.getMessage()).setUnfinishedMessage(this);
        } finally {
          // Runs even on failure: freeze any repeated fields that were
          // populated and attach whatever unknown fields were collected.
          if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
            children_ = java.util.Collections.unmodifiableList(children_);
          }
          if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) {
            refChildren_ = java.util.Collections.unmodifiableList(refChildren_);
          }
          this.unknownFields = unknownFields.build();
          makeExtensionsImmutable();
        }
      }
      /** Returns the protobuf descriptor for this message type. */
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeDirectorySection_DirEntry_descriptor;
      }

      // Hooks this class into the reflection-based field access machinery of
      // GeneratedMessage.
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeDirectorySection_DirEntry_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.DirEntry.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.DirEntry.Builder.class);
      }
11679    
      // Parser singleton backing all parseFrom(...) overloads; delegates to the
      // parsing constructor above. NOTE(review): declared public and non-final
      // by protoc 2.5 — callers should treat it as read-only.
      public static com.google.protobuf.Parser<DirEntry> PARSER =
          new com.google.protobuf.AbstractParser<DirEntry>() {
        public DirEntry parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          return new DirEntry(input, extensionRegistry);
        }
      };

      /** Returns the parser for DirEntry messages. */
      @java.lang.Override
      public com.google.protobuf.Parser<DirEntry> getParserForType() {
        return PARSER;
      }
11694    
      // Bit mask recording which optional fields were explicitly set
      // (bit 0 = parent).
      private int bitField0_;
      // optional uint64 parent = 1;
      public static final int PARENT_FIELD_NUMBER = 1;
      private long parent_;
      /**
       * <code>optional uint64 parent = 1;</code>
       */
      public boolean hasParent() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>optional uint64 parent = 1;</code>
       */
      public long getParent() {
        return parent_;
      }

      // repeated uint64 children = 2 [packed = true];
      public static final int CHILDREN_FIELD_NUMBER = 2;
      private java.util.List<java.lang.Long> children_;
      /**
       * <code>repeated uint64 children = 2 [packed = true];</code>
       *
       * <pre>
       * children that are not reference nodes
       * </pre>
       */
      public java.util.List<java.lang.Long>
          getChildrenList() {
        return children_;
      }
      /**
       * <code>repeated uint64 children = 2 [packed = true];</code>
       *
       * <pre>
       * children that are not reference nodes
       * </pre>
       */
      public int getChildrenCount() {
        return children_.size();
      }
      /**
       * <code>repeated uint64 children = 2 [packed = true];</code>
       *
       * <pre>
       * children that are not reference nodes
       * </pre>
       */
      public long getChildren(int index) {
        return children_.get(index);
      }
      // Byte size of the packed children payload; computed as a side effect of
      // getSerializedSize() and reused by writeTo() for the length prefix.
      private int childrenMemoizedSerializedSize = -1;

      // repeated uint32 refChildren = 3 [packed = true];
      public static final int REFCHILDREN_FIELD_NUMBER = 3;
      private java.util.List<java.lang.Integer> refChildren_;
      /**
       * <code>repeated uint32 refChildren = 3 [packed = true];</code>
       *
       * <pre>
       * children that are reference nodes, each element is a reference node id
       * </pre>
       */
      public java.util.List<java.lang.Integer>
          getRefChildrenList() {
        return refChildren_;
      }
      /**
       * <code>repeated uint32 refChildren = 3 [packed = true];</code>
       *
       * <pre>
       * children that are reference nodes, each element is a reference node id
       * </pre>
       */
      public int getRefChildrenCount() {
        return refChildren_.size();
      }
      /**
       * <code>repeated uint32 refChildren = 3 [packed = true];</code>
       *
       * <pre>
       * children that are reference nodes, each element is a reference node id
       * </pre>
       */
      public int getRefChildren(int index) {
        return refChildren_.get(index);
      }
      // Byte size of the packed refChildren payload; see
      // childrenMemoizedSerializedSize above for the protocol.
      private int refChildrenMemoizedSerializedSize = -1;
11783    
11784          private void initFields() {
11785            parent_ = 0L;
11786            children_ = java.util.Collections.emptyList();
11787            refChildren_ = java.util.Collections.emptyList();
11788          }
11789          private byte memoizedIsInitialized = -1;
11790          public final boolean isInitialized() {
11791            byte isInitialized = memoizedIsInitialized;
11792            if (isInitialized != -1) return isInitialized == 1;
11793    
11794            memoizedIsInitialized = 1;
11795            return true;
11796          }
11797    
      /** Serializes this message to {@code output} in protobuf wire format. */
      public void writeTo(com.google.protobuf.CodedOutputStream output)
                          throws java.io.IOException {
        // Must run first: it populates childrenMemoizedSerializedSize and
        // refChildrenMemoizedSerializedSize used for the packed-field headers.
        getSerializedSize();
        if (((bitField0_ & 0x00000001) == 0x00000001)) {
          output.writeUInt64(1, parent_);
        }
        if (getChildrenList().size() > 0) {
          // Packed encoding: raw tag 18 (field 2, wire type 2) followed by the
          // byte length of the packed payload, then the untagged varints.
          output.writeRawVarint32(18);
          output.writeRawVarint32(childrenMemoizedSerializedSize);
        }
        for (int i = 0; i < children_.size(); i++) {
          output.writeUInt64NoTag(children_.get(i));
        }
        if (getRefChildrenList().size() > 0) {
          // Packed encoding: raw tag 26 (field 3, wire type 2).
          output.writeRawVarint32(26);
          output.writeRawVarint32(refChildrenMemoizedSerializedSize);
        }
        for (int i = 0; i < refChildren_.size(); i++) {
          output.writeUInt32NoTag(refChildren_.get(i));
        }
        getUnknownFields().writeTo(output);
      }
11820    
      // Cached total serialized size; -1 until first computed.
      private int memoizedSerializedSize = -1;
      /**
       * Computes (and memoizes) the total wire size in bytes. Also stores the
       * packed payload sizes of the repeated fields for later use by writeTo().
       */
      public int getSerializedSize() {
        int size = memoizedSerializedSize;
        if (size != -1) return size;

        size = 0;
        if (((bitField0_ & 0x00000001) == 0x00000001)) {
          size += com.google.protobuf.CodedOutputStream
            .computeUInt64Size(1, parent_);
        }
        {
          // Packed children: payload bytes plus, when non-empty, one tag byte
          // and the varint length prefix.
          int dataSize = 0;
          for (int i = 0; i < children_.size(); i++) {
            dataSize += com.google.protobuf.CodedOutputStream
              .computeUInt64SizeNoTag(children_.get(i));
          }
          size += dataSize;
          if (!getChildrenList().isEmpty()) {
            size += 1;
            size += com.google.protobuf.CodedOutputStream
                .computeInt32SizeNoTag(dataSize);
          }
          childrenMemoizedSerializedSize = dataSize;
        }
        {
          // Packed refChildren: same accounting as children above.
          int dataSize = 0;
          for (int i = 0; i < refChildren_.size(); i++) {
            dataSize += com.google.protobuf.CodedOutputStream
              .computeUInt32SizeNoTag(refChildren_.get(i));
          }
          size += dataSize;
          if (!getRefChildrenList().isEmpty()) {
            size += 1;
            size += com.google.protobuf.CodedOutputStream
                .computeInt32SizeNoTag(dataSize);
          }
          refChildrenMemoizedSerializedSize = dataSize;
        }
        size += getUnknownFields().getSerializedSize();
        memoizedSerializedSize = size;
        return size;
      }
11863    
      private static final long serialVersionUID = 0L;
      // Java serialization hook; delegates to GeneratedMessage.writeReplace
      // (presumably substituting the protobuf byte form — behavior defined by
      // the superclass, not visible here).
      @java.lang.Override
      protected java.lang.Object writeReplace()
          throws java.io.ObjectStreamException {
        return super.writeReplace();
      }
11870    
      // Static parsing convenience methods; every overload delegates to PARSER.
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.DirEntry parseFrom(
          com.google.protobuf.ByteString data)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.DirEntry parseFrom(
          com.google.protobuf.ByteString data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.DirEntry parseFrom(byte[] data)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.DirEntry parseFrom(
          byte[] data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.DirEntry parseFrom(java.io.InputStream input)
          throws java.io.IOException {
        return PARSER.parseFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.DirEntry parseFrom(
          java.io.InputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseFrom(input, extensionRegistry);
      }
      // Delimited variants read a varint length prefix before the message body.
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.DirEntry parseDelimitedFrom(java.io.InputStream input)
          throws java.io.IOException {
        return PARSER.parseDelimitedFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.DirEntry parseDelimitedFrom(
          java.io.InputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseDelimitedFrom(input, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.DirEntry parseFrom(
          com.google.protobuf.CodedInputStream input)
          throws java.io.IOException {
        return PARSER.parseFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.DirEntry parseFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseFrom(input, extensionRegistry);
      }
11923    
      /** Creates a fresh, empty builder for DirEntry. */
      public static Builder newBuilder() { return Builder.create(); }
      public Builder newBuilderForType() { return newBuilder(); }
      // Returns a new builder pre-populated with the contents of prototype.
      public static Builder newBuilder(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.DirEntry prototype) {
        return newBuilder().mergeFrom(prototype);
      }
      public Builder toBuilder() { return newBuilder(this); }

      // Builder creation hook used internally by GeneratedMessage when this
      // message is nested inside another builder.
      @java.lang.Override
      protected Builder newBuilderForType(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        Builder builder = new Builder(parent);
        return builder;
      }
11937          /**
11938           * Protobuf type {@code hadoop.hdfs.fsimage.INodeDirectorySection.DirEntry}
11939           *
11940           * <pre>
11941           **
11942           * A single DirEntry needs to fit in the default PB max message size of
11943           * 64MB. Please be careful when adding more fields to a DirEntry!
11944           * </pre>
11945           */
      public static final class Builder extends
          com.google.protobuf.GeneratedMessage.Builder<Builder>
         implements org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.DirEntryOrBuilder {
        /** Returns the protobuf descriptor for DirEntry. */
        public static final com.google.protobuf.Descriptors.Descriptor
            getDescriptor() {
          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeDirectorySection_DirEntry_descriptor;
        }

        // Hooks the builder into the reflection-based field access machinery.
        protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
            internalGetFieldAccessorTable() {
          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeDirectorySection_DirEntry_fieldAccessorTable
              .ensureFieldAccessorsInitialized(
                  org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.DirEntry.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.DirEntry.Builder.class);
        }

        // Construct using org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.DirEntry.newBuilder()
        private Builder() {
          maybeForceBuilderInitialization();
        }

        private Builder(
            com.google.protobuf.GeneratedMessage.BuilderParent parent) {
          super(parent);
          maybeForceBuilderInitialization();
        }
        // DirEntry has no message-typed fields, so there are no nested field
        // builders to force-initialize; the body is intentionally empty.
        private void maybeForceBuilderInitialization() {
          if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          }
        }
        private static Builder create() {
          return new Builder();
        }

        /** Resets every field to its default and clears all "has" bits. */
        public Builder clear() {
          super.clear();
          parent_ = 0L;
          bitField0_ = (bitField0_ & ~0x00000001);
          children_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000002);
          refChildren_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000004);
          return this;
        }

        public Builder clone() {
          return create().mergeFrom(buildPartial());
        }

        public com.google.protobuf.Descriptors.Descriptor
            getDescriptorForType() {
          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeDirectorySection_DirEntry_descriptor;
        }

        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.DirEntry getDefaultInstanceForType() {
          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.DirEntry.getDefaultInstance();
        }

        /** Builds the message, throwing if it is not fully initialized. */
        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.DirEntry build() {
          org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.DirEntry result = buildPartial();
          if (!result.isInitialized()) {
            throw newUninitializedMessageException(result);
          }
          return result;
        }

        // Builds without the isInitialized() check. Repeated lists are frozen
        // (made unmodifiable) and handed to the message; the corresponding
        // builder bits are cleared so later mutation forces a fresh copy.
        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.DirEntry buildPartial() {
          org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.DirEntry result = new org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.DirEntry(this);
          int from_bitField0_ = bitField0_;
          int to_bitField0_ = 0;
          if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
            to_bitField0_ |= 0x00000001;
          }
          result.parent_ = parent_;
          if (((bitField0_ & 0x00000002) == 0x00000002)) {
            children_ = java.util.Collections.unmodifiableList(children_);
            bitField0_ = (bitField0_ & ~0x00000002);
          }
          result.children_ = children_;
          if (((bitField0_ & 0x00000004) == 0x00000004)) {
            refChildren_ = java.util.Collections.unmodifiableList(refChildren_);
            bitField0_ = (bitField0_ & ~0x00000004);
          }
          result.refChildren_ = refChildren_;
          result.bitField0_ = to_bitField0_;
          onBuilt();
          return result;
        }

        // Dispatches to the type-specific merge when the argument is a DirEntry.
        public Builder mergeFrom(com.google.protobuf.Message other) {
          if (other instanceof org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.DirEntry) {
            return mergeFrom((org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.DirEntry)other);
          } else {
            super.mergeFrom(other);
            return this;
          }
        }

        // Field-by-field merge: the scalar parent is overwritten when set on
        // other; repeated fields are concatenated. When our list is empty we
        // share other's (already immutable) list instead of copying.
        public Builder mergeFrom(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.DirEntry other) {
          if (other == org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.DirEntry.getDefaultInstance()) return this;
          if (other.hasParent()) {
            setParent(other.getParent());
          }
          if (!other.children_.isEmpty()) {
            if (children_.isEmpty()) {
              children_ = other.children_;
              bitField0_ = (bitField0_ & ~0x00000002);
            } else {
              ensureChildrenIsMutable();
              children_.addAll(other.children_);
            }
            onChanged();
          }
          if (!other.refChildren_.isEmpty()) {
            if (refChildren_.isEmpty()) {
              refChildren_ = other.refChildren_;
              bitField0_ = (bitField0_ & ~0x00000004);
            } else {
              ensureRefChildrenIsMutable();
              refChildren_.addAll(other.refChildren_);
            }
            onChanged();
          }
          this.mergeUnknownFields(other.getUnknownFields());
          return this;
        }

        // Always true: DirEntry declares no required fields.
        public final boolean isInitialized() {
          return true;
        }

        // Parses from input and merges the result in. On failure the partially
        // parsed message (if any) is still merged before rethrowing.
        public Builder mergeFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws java.io.IOException {
          org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.DirEntry parsedMessage = null;
          try {
            parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            parsedMessage = (org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.DirEntry) e.getUnfinishedMessage();
            throw e;
          } finally {
            if (parsedMessage != null) {
              mergeFrom(parsedMessage);
            }
          }
          return this;
        }
        // Bit 0 = parent set; bits 1/2 = children/refChildren lists are mutable
        // and owned by this builder.
        private int bitField0_;

        // optional uint64 parent = 1;
        private long parent_ ;
        /**
         * <code>optional uint64 parent = 1;</code>
         */
        public boolean hasParent() {
          return ((bitField0_ & 0x00000001) == 0x00000001);
        }
        /**
         * <code>optional uint64 parent = 1;</code>
         */
        public long getParent() {
          return parent_;
        }
        /**
         * <code>optional uint64 parent = 1;</code>
         */
        public Builder setParent(long value) {
          bitField0_ |= 0x00000001;
          parent_ = value;
          onChanged();
          return this;
        }
        /**
         * <code>optional uint64 parent = 1;</code>
         */
        public Builder clearParent() {
          bitField0_ = (bitField0_ & ~0x00000001);
          parent_ = 0L;
          onChanged();
          return this;
        }

        // repeated uint64 children = 2 [packed = true];
        private java.util.List<java.lang.Long> children_ = java.util.Collections.emptyList();
        // Copy-on-write guard: replaces a shared/immutable list with a private
        // mutable copy before the first in-place mutation.
        private void ensureChildrenIsMutable() {
          if (!((bitField0_ & 0x00000002) == 0x00000002)) {
            children_ = new java.util.ArrayList<java.lang.Long>(children_);
            bitField0_ |= 0x00000002;
           }
        }
        /**
         * <code>repeated uint64 children = 2 [packed = true];</code>
         *
         * <pre>
         * children that are not reference nodes
         * </pre>
         */
        public java.util.List<java.lang.Long>
            getChildrenList() {
          return java.util.Collections.unmodifiableList(children_);
        }
        /**
         * <code>repeated uint64 children = 2 [packed = true];</code>
         *
         * <pre>
         * children that are not reference nodes
         * </pre>
         */
        public int getChildrenCount() {
          return children_.size();
        }
        /**
         * <code>repeated uint64 children = 2 [packed = true];</code>
         *
         * <pre>
         * children that are not reference nodes
         * </pre>
         */
        public long getChildren(int index) {
          return children_.get(index);
        }
        /**
         * <code>repeated uint64 children = 2 [packed = true];</code>
         *
         * <pre>
         * children that are not reference nodes
         * </pre>
         */
        public Builder setChildren(
            int index, long value) {
          ensureChildrenIsMutable();
          children_.set(index, value);
          onChanged();
          return this;
        }
        /**
         * <code>repeated uint64 children = 2 [packed = true];</code>
         *
         * <pre>
         * children that are not reference nodes
         * </pre>
         */
        public Builder addChildren(long value) {
          ensureChildrenIsMutable();
          children_.add(value);
          onChanged();
          return this;
        }
        /**
         * <code>repeated uint64 children = 2 [packed = true];</code>
         *
         * <pre>
         * children that are not reference nodes
         * </pre>
         */
        public Builder addAllChildren(
            java.lang.Iterable<? extends java.lang.Long> values) {
          ensureChildrenIsMutable();
          super.addAll(values, children_);
          onChanged();
          return this;
        }
        /**
         * <code>repeated uint64 children = 2 [packed = true];</code>
         *
         * <pre>
         * children that are not reference nodes
         * </pre>
         */
        public Builder clearChildren() {
          children_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000002);
          onChanged();
          return this;
        }

        // repeated uint32 refChildren = 3 [packed = true];
        private java.util.List<java.lang.Integer> refChildren_ = java.util.Collections.emptyList();
        // Copy-on-write guard; see ensureChildrenIsMutable above.
        private void ensureRefChildrenIsMutable() {
          if (!((bitField0_ & 0x00000004) == 0x00000004)) {
            refChildren_ = new java.util.ArrayList<java.lang.Integer>(refChildren_);
            bitField0_ |= 0x00000004;
           }
        }
        /**
         * <code>repeated uint32 refChildren = 3 [packed = true];</code>
         *
         * <pre>
         * children that are reference nodes, each element is a reference node id
         * </pre>
         */
        public java.util.List<java.lang.Integer>
            getRefChildrenList() {
          return java.util.Collections.unmodifiableList(refChildren_);
        }
        /**
         * <code>repeated uint32 refChildren = 3 [packed = true];</code>
         *
         * <pre>
         * children that are reference nodes, each element is a reference node id
         * </pre>
         */
        public int getRefChildrenCount() {
          return refChildren_.size();
        }
        /**
         * <code>repeated uint32 refChildren = 3 [packed = true];</code>
         *
         * <pre>
         * children that are reference nodes, each element is a reference node id
         * </pre>
         */
        public int getRefChildren(int index) {
          return refChildren_.get(index);
        }
        /**
         * <code>repeated uint32 refChildren = 3 [packed = true];</code>
         *
         * <pre>
         * children that are reference nodes, each element is a reference node id
         * </pre>
         */
        public Builder setRefChildren(
            int index, int value) {
          ensureRefChildrenIsMutable();
          refChildren_.set(index, value);
          onChanged();
          return this;
        }
        /**
         * <code>repeated uint32 refChildren = 3 [packed = true];</code>
         *
         * <pre>
         * children that are reference nodes, each element is a reference node id
         * </pre>
         */
        public Builder addRefChildren(int value) {
          ensureRefChildrenIsMutable();
          refChildren_.add(value);
          onChanged();
          return this;
        }
        /**
         * <code>repeated uint32 refChildren = 3 [packed = true];</code>
         *
         * <pre>
         * children that are reference nodes, each element is a reference node id
         * </pre>
         */
        public Builder addAllRefChildren(
            java.lang.Iterable<? extends java.lang.Integer> values) {
          ensureRefChildrenIsMutable();
          super.addAll(values, refChildren_);
          onChanged();
          return this;
        }
        /**
         * <code>repeated uint32 refChildren = 3 [packed = true];</code>
         *
         * <pre>
         * children that are reference nodes, each element is a reference node id
         * </pre>
         */
        public Builder clearRefChildren() {
          refChildren_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000004);
          onChanged();
          return this;
        }

        // @@protoc_insertion_point(builder_scope:hadoop.hdfs.fsimage.INodeDirectorySection.DirEntry)
      }
12318    
      // Eagerly build the shared default (empty) DirEntry instance.
      static {
        defaultInstance = new DirEntry(true);
        defaultInstance.initFields();
      }
12323    
12324          // @@protoc_insertion_point(class_scope:hadoop.hdfs.fsimage.INodeDirectorySection.DirEntry)
12325        }
12326    
    // No singular fields declared directly on INodeDirectorySection, so there
    // is nothing to reset to defaults here.
    private void initFields() {
    }
    // Memoized isInitialized() result: -1 = not yet computed, 0 = false, 1 = true.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      // No required fields visible here, so the message is always initialized.
      memoizedIsInitialized = 1;
      return true;
    }
12337    
    // Serializes this message to the wire. Only unknown fields are written:
    // this message declares no fields of its own in this section.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      // Called for its side effect of populating memoizedSerializedSize.
      getSerializedSize();
      getUnknownFields().writeTo(output);
    }
12343    
    // Cached wire size; -1 means "not yet computed".
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      // Only unknown fields contribute: no declared fields on this message.
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
12354    
    private static final long serialVersionUID = 0L;
    // Java serialization hook; delegates to GeneratedMessage's replacement
    // object so protobuf messages serialize via their wire format.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
12361    
    // Static parse entry points. All overloads delegate to the shared PARSER;
    // they differ only in input type (ByteString, byte[], InputStream,
    // CodedInputStream) and whether an extension registry is supplied.
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    // Delimited variants first read a varint length prefix, then that many bytes.
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
12414    
    // Builder factories: newBuilder() starts empty; newBuilder(prototype)
    // starts pre-populated from an existing message; toBuilder() clones this.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    // Framework hook: creates a builder wired to a parent for change notification.
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
12428        /**
12429         * Protobuf type {@code hadoop.hdfs.fsimage.INodeDirectorySection}
12430         *
12431         * <pre>
12432         **
12433         * This section records the children of each directories
12434         * NAME: INODE_DIR
12435         * </pre>
12436         */
12437        public static final class Builder extends
12438            com.google.protobuf.GeneratedMessage.Builder<Builder>
12439           implements org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySectionOrBuilder {
12440          public static final com.google.protobuf.Descriptors.Descriptor
12441              getDescriptor() {
12442            return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeDirectorySection_descriptor;
12443          }
12444    
12445          protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
12446              internalGetFieldAccessorTable() {
12447            return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeDirectorySection_fieldAccessorTable
12448                .ensureFieldAccessorsInitialized(
12449                    org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.Builder.class);
12450          }
12451    
12452          // Construct using org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.newBuilder()
12453          private Builder() {
12454            maybeForceBuilderInitialization();
12455          }
12456    
12457          private Builder(
12458              com.google.protobuf.GeneratedMessage.BuilderParent parent) {
12459            super(parent);
12460            maybeForceBuilderInitialization();
12461          }
12462          private void maybeForceBuilderInitialization() {
12463            if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
12464            }
12465          }
12466          private static Builder create() {
12467            return new Builder();
12468          }
12469    
12470          public Builder clear() {
12471            super.clear();
12472            return this;
12473          }
12474    
12475          public Builder clone() {
12476            return create().mergeFrom(buildPartial());
12477          }
12478    
12479          public com.google.protobuf.Descriptors.Descriptor
12480              getDescriptorForType() {
12481            return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeDirectorySection_descriptor;
12482          }
12483    
12484          public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection getDefaultInstanceForType() {
12485            return org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.getDefaultInstance();
12486          }
12487    
12488          public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection build() {
12489            org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection result = buildPartial();
12490            if (!result.isInitialized()) {
12491              throw newUninitializedMessageException(result);
12492            }
12493            return result;
12494          }
12495    
12496          public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection buildPartial() {
12497            org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection result = new org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection(this);
12498            onBuilt();
12499            return result;
12500          }
12501    
12502          public Builder mergeFrom(com.google.protobuf.Message other) {
12503            if (other instanceof org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection) {
12504              return mergeFrom((org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection)other);
12505            } else {
12506              super.mergeFrom(other);
12507              return this;
12508            }
12509          }
12510    
12511          public Builder mergeFrom(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection other) {
12512            if (other == org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.getDefaultInstance()) return this;
12513            this.mergeUnknownFields(other.getUnknownFields());
12514            return this;
12515          }
12516    
12517          public final boolean isInitialized() {
12518            return true;
12519          }
12520    
12521          public Builder mergeFrom(
12522              com.google.protobuf.CodedInputStream input,
12523              com.google.protobuf.ExtensionRegistryLite extensionRegistry)
12524              throws java.io.IOException {
12525            org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection parsedMessage = null;
12526            try {
12527              parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
12528            } catch (com.google.protobuf.InvalidProtocolBufferException e) {
12529              parsedMessage = (org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection) e.getUnfinishedMessage();
12530              throw e;
12531            } finally {
12532              if (parsedMessage != null) {
12533                mergeFrom(parsedMessage);
12534              }
12535            }
12536            return this;
12537          }
12538    
12539          // @@protoc_insertion_point(builder_scope:hadoop.hdfs.fsimage.INodeDirectorySection)
12540        }
12541    
    // Eagerly build the shared default (empty) INodeDirectorySection instance.
    static {
      defaultInstance = new INodeDirectorySection(true);
      defaultInstance.initFields();
    }
12546    
12547        // @@protoc_insertion_point(class_scope:hadoop.hdfs.fsimage.INodeDirectorySection)
12548      }
12549    
  // Read-only accessor interface implemented by both INodeReferenceSection and
  // its Builder. Empty because the section message declares no top-level fields.
  public interface INodeReferenceSectionOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
  }
12553      /**
12554       * Protobuf type {@code hadoop.hdfs.fsimage.INodeReferenceSection}
12555       */
12556      public static final class INodeReferenceSection extends
12557          com.google.protobuf.GeneratedMessage
12558          implements INodeReferenceSectionOrBuilder {
    // Use INodeReferenceSection.newBuilder() to construct.
    private INodeReferenceSection(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // noInit constructor used only for the shared default instance.
    private INodeReferenceSection(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    // Singleton empty instance, assigned in the class static initializer.
    private static final INodeReferenceSection defaultInstance;
    public static INodeReferenceSection getDefaultInstance() {
      return defaultInstance;
    }

    public INodeReferenceSection getDefaultInstanceForType() {
      return defaultInstance;
    }

    // Fields present on the wire but not in this schema version are kept here.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Stream-parsing constructor: reads tag/value pairs until end of message.
    // This message declares no fields, so every field encountered is preserved
    // in unknownFields rather than dropped.
    private INodeReferenceSection(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              // Tag 0 marks end of input / enclosing message.
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                // false return means an end-group tag: stop parsing.
                done = true;
              }
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Always freeze parsed state, even on failure, so the partial message
        // attached to the exception is usable.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    // Reflection support: descriptor and field-accessor table for this type,
    // both defined at the FsImageProto outer-class level.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeReferenceSection_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeReferenceSection_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.Builder.class);
    }
12626    
12627        public static com.google.protobuf.Parser<INodeReferenceSection> PARSER =
12628            new com.google.protobuf.AbstractParser<INodeReferenceSection>() {
12629          public INodeReferenceSection parsePartialFrom(
12630              com.google.protobuf.CodedInputStream input,
12631              com.google.protobuf.ExtensionRegistryLite extensionRegistry)
12632              throws com.google.protobuf.InvalidProtocolBufferException {
12633            return new INodeReferenceSection(input, extensionRegistry);
12634          }
12635        };
12636    
12637        @java.lang.Override
12638        public com.google.protobuf.Parser<INodeReferenceSection> getParserForType() {
12639          return PARSER;
12640        }
12641    
    // Read-only accessor interface implemented by both INodeReference and its
    // Builder; one has/get pair per optional field.
    public interface INodeReferenceOrBuilder
        extends com.google.protobuf.MessageOrBuilder {

      // optional uint64 referredId = 1;
      /**
       * <code>optional uint64 referredId = 1;</code>
       *
       * <pre>
       * id of the referred inode
       * </pre>
       */
      boolean hasReferredId();
      /**
       * <code>optional uint64 referredId = 1;</code>
       *
       * <pre>
       * id of the referred inode
       * </pre>
       */
      long getReferredId();

      // optional bytes name = 2;
      /**
       * <code>optional bytes name = 2;</code>
       *
       * <pre>
       * local name recorded in WithName
       * </pre>
       */
      boolean hasName();
      /**
       * <code>optional bytes name = 2;</code>
       *
       * <pre>
       * local name recorded in WithName
       * </pre>
       */
      com.google.protobuf.ByteString getName();

      // optional uint32 dstSnapshotId = 3;
      /**
       * <code>optional uint32 dstSnapshotId = 3;</code>
       *
       * <pre>
       * recorded in DstReference
       * </pre>
       */
      boolean hasDstSnapshotId();
      /**
       * <code>optional uint32 dstSnapshotId = 3;</code>
       *
       * <pre>
       * recorded in DstReference
       * </pre>
       */
      int getDstSnapshotId();

      // optional uint32 lastSnapshotId = 4;
      /**
       * <code>optional uint32 lastSnapshotId = 4;</code>
       *
       * <pre>
       * recorded in WithName
       * </pre>
       */
      boolean hasLastSnapshotId();
      /**
       * <code>optional uint32 lastSnapshotId = 4;</code>
       *
       * <pre>
       * recorded in WithName
       * </pre>
       */
      int getLastSnapshotId();
    }
12717        /**
12718         * Protobuf type {@code hadoop.hdfs.fsimage.INodeReferenceSection.INodeReference}
12719         */
12720        public static final class INodeReference extends
12721            com.google.protobuf.GeneratedMessage
12722            implements INodeReferenceOrBuilder {
      // Use INodeReference.newBuilder() to construct.
      private INodeReference(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
        super(builder);
        this.unknownFields = builder.getUnknownFields();
      }
      // noInit constructor used only for the shared default instance.
      private INodeReference(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

      // Singleton empty instance, assigned in the class static initializer.
      private static final INodeReference defaultInstance;
      public static INodeReference getDefaultInstance() {
        return defaultInstance;
      }

      public INodeReference getDefaultInstanceForType() {
        return defaultInstance;
      }

      // Fields present on the wire but not in this schema version are kept here.
      private final com.google.protobuf.UnknownFieldSet unknownFields;
      @java.lang.Override
      public final com.google.protobuf.UnknownFieldSet
          getUnknownFields() {
        return this.unknownFields;
      }
      // Stream-parsing constructor: reads tag/value pairs until end of message,
      // setting the matching bitField0_ bit as each known field arrives.
      // NOTE: the `default` label appears before the `case` labels; this is
      // legal Java — switch labels match by value, not textual order.
      private INodeReference(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        initFields();
        int mutable_bitField0_ = 0;
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
            com.google.protobuf.UnknownFieldSet.newBuilder();
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                // Tag 0 marks end of input / enclosing message.
                done = true;
                break;
              default: {
                if (!parseUnknownField(input, unknownFields,
                                       extensionRegistry, tag)) {
                  // false return means an end-group tag: stop parsing.
                  done = true;
                }
                break;
              }
              // Tags below are (field_number << 3) | wire_type:
              case 8: {   // field 1, varint
                bitField0_ |= 0x00000001;
                referredId_ = input.readUInt64();
                break;
              }
              case 18: {  // field 2, length-delimited
                bitField0_ |= 0x00000002;
                name_ = input.readBytes();
                break;
              }
              case 24: {  // field 3, varint
                bitField0_ |= 0x00000004;
                dstSnapshotId_ = input.readUInt32();
                break;
              }
              case 32: {  // field 4, varint
                bitField0_ |= 0x00000008;
                lastSnapshotId_ = input.readUInt32();
                break;
              }
            }
          }
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(this);
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(
              e.getMessage()).setUnfinishedMessage(this);
        } finally {
          // Always freeze parsed state, even on failure, so the partial
          // message attached to the exception is usable.
          this.unknownFields = unknownFields.build();
          makeExtensionsImmutable();
        }
      }
      // Reflection support: descriptor and field-accessor table for this type,
      // both defined at the FsImageProto outer-class level.
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeReferenceSection_INodeReference_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeReferenceSection_INodeReference_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.INodeReference.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.INodeReference.Builder.class);
      }
12811    
12812          public static com.google.protobuf.Parser<INodeReference> PARSER =
12813              new com.google.protobuf.AbstractParser<INodeReference>() {
12814            public INodeReference parsePartialFrom(
12815                com.google.protobuf.CodedInputStream input,
12816                com.google.protobuf.ExtensionRegistryLite extensionRegistry)
12817                throws com.google.protobuf.InvalidProtocolBufferException {
12818              return new INodeReference(input, extensionRegistry);
12819            }
12820          };
12821    
12822          @java.lang.Override
12823          public com.google.protobuf.Parser<INodeReference> getParserForType() {
12824            return PARSER;
12825          }
12826    
      // Presence bitmap: bit N set means optional field N+1 was explicitly set.
      private int bitField0_;
      // optional uint64 referredId = 1;
      public static final int REFERREDID_FIELD_NUMBER = 1;
      private long referredId_;
      /**
       * <code>optional uint64 referredId = 1;</code>
       *
       * <pre>
       * id of the referred inode
       * </pre>
       */
      public boolean hasReferredId() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>optional uint64 referredId = 1;</code>
       *
       * <pre>
       * id of the referred inode
       * </pre>
       */
      public long getReferredId() {
        return referredId_;
      }

      // optional bytes name = 2;
      public static final int NAME_FIELD_NUMBER = 2;
      private com.google.protobuf.ByteString name_;
      /**
       * <code>optional bytes name = 2;</code>
       *
       * <pre>
       * local name recorded in WithName
       * </pre>
       */
      public boolean hasName() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>optional bytes name = 2;</code>
       *
       * <pre>
       * local name recorded in WithName
       * </pre>
       */
      public com.google.protobuf.ByteString getName() {
        return name_;
      }

      // optional uint32 dstSnapshotId = 3;
      public static final int DSTSNAPSHOTID_FIELD_NUMBER = 3;
      private int dstSnapshotId_;
      /**
       * <code>optional uint32 dstSnapshotId = 3;</code>
       *
       * <pre>
       * recorded in DstReference
       * </pre>
       */
      public boolean hasDstSnapshotId() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      /**
       * <code>optional uint32 dstSnapshotId = 3;</code>
       *
       * <pre>
       * recorded in DstReference
       * </pre>
       */
      public int getDstSnapshotId() {
        return dstSnapshotId_;
      }

      // optional uint32 lastSnapshotId = 4;
      public static final int LASTSNAPSHOTID_FIELD_NUMBER = 4;
      private int lastSnapshotId_;
      /**
       * <code>optional uint32 lastSnapshotId = 4;</code>
       *
       * <pre>
       * recorded in WithName
       * </pre>
       */
      public boolean hasLastSnapshotId() {
        return ((bitField0_ & 0x00000008) == 0x00000008);
      }
      /**
       * <code>optional uint32 lastSnapshotId = 4;</code>
       *
       * <pre>
       * recorded in WithName
       * </pre>
       */
      public int getLastSnapshotId() {
        return lastSnapshotId_;
      }
12923    
      // Resets all fields to their proto defaults before parsing.
      private void initFields() {
        referredId_ = 0L;
        name_ = com.google.protobuf.ByteString.EMPTY;
        dstSnapshotId_ = 0;
        lastSnapshotId_ = 0;
      }
      // Memoized isInitialized() result: -1 = not yet computed, 0 = false, 1 = true.
      private byte memoizedIsInitialized = -1;
      public final boolean isInitialized() {
        byte isInitialized = memoizedIsInitialized;
        if (isInitialized != -1) return isInitialized == 1;

        // All fields are optional, so the message is always initialized.
        memoizedIsInitialized = 1;
        return true;
      }
12938    
      // Serializes this message to the wire, emitting only fields whose
      // presence bit is set, in field-number order, then any unknown fields.
      public void writeTo(com.google.protobuf.CodedOutputStream output)
                          throws java.io.IOException {
        // Called for its side effect of populating memoizedSerializedSize.
        getSerializedSize();
        if (((bitField0_ & 0x00000001) == 0x00000001)) {
          output.writeUInt64(1, referredId_);
        }
        if (((bitField0_ & 0x00000002) == 0x00000002)) {
          output.writeBytes(2, name_);
        }
        if (((bitField0_ & 0x00000004) == 0x00000004)) {
          output.writeUInt32(3, dstSnapshotId_);
        }
        if (((bitField0_ & 0x00000008) == 0x00000008)) {
          output.writeUInt32(4, lastSnapshotId_);
        }
        getUnknownFields().writeTo(output);
      }
12956    
      // Cached wire size; -1 means "not yet computed".
      private int memoizedSerializedSize = -1;
      public int getSerializedSize() {
        int size = memoizedSerializedSize;
        if (size != -1) return size;

        // Sum the encoded size of each present field plus unknown fields;
        // mirrors the field order in writeTo().
        size = 0;
        if (((bitField0_ & 0x00000001) == 0x00000001)) {
          size += com.google.protobuf.CodedOutputStream
            .computeUInt64Size(1, referredId_);
        }
        if (((bitField0_ & 0x00000002) == 0x00000002)) {
          size += com.google.protobuf.CodedOutputStream
            .computeBytesSize(2, name_);
        }
        if (((bitField0_ & 0x00000004) == 0x00000004)) {
          size += com.google.protobuf.CodedOutputStream
            .computeUInt32Size(3, dstSnapshotId_);
        }
        if (((bitField0_ & 0x00000008) == 0x00000008)) {
          size += com.google.protobuf.CodedOutputStream
            .computeUInt32Size(4, lastSnapshotId_);
        }
        size += getUnknownFields().getSerializedSize();
        memoizedSerializedSize = size;
        return size;
      }
12983    
      private static final long serialVersionUID = 0L;
      // Java serialization hook; delegates to GeneratedMessage's replacement
      // object so protobuf messages serialize via their wire format.
      @java.lang.Override
      protected java.lang.Object writeReplace()
          throws java.io.ObjectStreamException {
        return super.writeReplace();
      }
12990    
        // -----------------------------------------------------------------
        // Static parse entry points. All delegate to PARSER and differ only
        // in the input source (ByteString, byte[], InputStream,
        // CodedInputStream) and whether an extension registry is supplied.
        // The "Delimited" variants read a varint length prefix before the
        // message bytes.
        // -----------------------------------------------------------------
        public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.INodeReference parseFrom(
            com.google.protobuf.ByteString data)
            throws com.google.protobuf.InvalidProtocolBufferException {
          return PARSER.parseFrom(data);
        }
        public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.INodeReference parseFrom(
            com.google.protobuf.ByteString data,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          return PARSER.parseFrom(data, extensionRegistry);
        }
        public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.INodeReference parseFrom(byte[] data)
            throws com.google.protobuf.InvalidProtocolBufferException {
          return PARSER.parseFrom(data);
        }
        public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.INodeReference parseFrom(
            byte[] data,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          return PARSER.parseFrom(data, extensionRegistry);
        }
        public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.INodeReference parseFrom(java.io.InputStream input)
            throws java.io.IOException {
          return PARSER.parseFrom(input);
        }
        public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.INodeReference parseFrom(
            java.io.InputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws java.io.IOException {
          return PARSER.parseFrom(input, extensionRegistry);
        }
        public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.INodeReference parseDelimitedFrom(java.io.InputStream input)
            throws java.io.IOException {
          return PARSER.parseDelimitedFrom(input);
        }
        public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.INodeReference parseDelimitedFrom(
            java.io.InputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws java.io.IOException {
          return PARSER.parseDelimitedFrom(input, extensionRegistry);
        }
        public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.INodeReference parseFrom(
            com.google.protobuf.CodedInputStream input)
            throws java.io.IOException {
          return PARSER.parseFrom(input);
        }
        public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.INodeReference parseFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws java.io.IOException {
          return PARSER.parseFrom(input, extensionRegistry);
        }
13043    
        /** Creates a fresh builder with all fields cleared. */
        public static Builder newBuilder() { return Builder.create(); }
        public Builder newBuilderForType() { return newBuilder(); }
        /** Creates a builder pre-populated by merging {@code prototype}. */
        public static Builder newBuilder(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.INodeReference prototype) {
          return newBuilder().mergeFrom(prototype);
        }
        public Builder toBuilder() { return newBuilder(this); }

        // Internal factory used by the runtime to create a child builder
        // attached to a parent (for nested-builder change notification).
        @java.lang.Override
        protected Builder newBuilderForType(
            com.google.protobuf.GeneratedMessage.BuilderParent parent) {
          Builder builder = new Builder(parent);
          return builder;
        }
13057          /**
13058           * Protobuf type {@code hadoop.hdfs.fsimage.INodeReferenceSection.INodeReference}
13059           */
13060          public static final class Builder extends
13061              com.google.protobuf.GeneratedMessage.Builder<Builder>
13062             implements org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.INodeReferenceOrBuilder {
13063            public static final com.google.protobuf.Descriptors.Descriptor
13064                getDescriptor() {
13065              return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeReferenceSection_INodeReference_descriptor;
13066            }
13067    
13068            protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
13069                internalGetFieldAccessorTable() {
13070              return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeReferenceSection_INodeReference_fieldAccessorTable
13071                  .ensureFieldAccessorsInitialized(
13072                      org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.INodeReference.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.INodeReference.Builder.class);
13073            }
13074    
13075            // Construct using org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.INodeReference.newBuilder()
13076            private Builder() {
13077              maybeForceBuilderInitialization();
13078            }
13079    
13080            private Builder(
13081                com.google.protobuf.GeneratedMessage.BuilderParent parent) {
13082              super(parent);
13083              maybeForceBuilderInitialization();
13084            }
13085            private void maybeForceBuilderInitialization() {
13086              if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
13087              }
13088            }
13089            private static Builder create() {
13090              return new Builder();
13091            }
13092    
13093            public Builder clear() {
13094              super.clear();
13095              referredId_ = 0L;
13096              bitField0_ = (bitField0_ & ~0x00000001);
13097              name_ = com.google.protobuf.ByteString.EMPTY;
13098              bitField0_ = (bitField0_ & ~0x00000002);
13099              dstSnapshotId_ = 0;
13100              bitField0_ = (bitField0_ & ~0x00000004);
13101              lastSnapshotId_ = 0;
13102              bitField0_ = (bitField0_ & ~0x00000008);
13103              return this;
13104            }
13105    
13106            public Builder clone() {
13107              return create().mergeFrom(buildPartial());
13108            }
13109    
13110            public com.google.protobuf.Descriptors.Descriptor
13111                getDescriptorForType() {
13112              return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeReferenceSection_INodeReference_descriptor;
13113            }
13114    
13115            public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.INodeReference getDefaultInstanceForType() {
13116              return org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.INodeReference.getDefaultInstance();
13117            }
13118    
13119            public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.INodeReference build() {
13120              org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.INodeReference result = buildPartial();
13121              if (!result.isInitialized()) {
13122                throw newUninitializedMessageException(result);
13123              }
13124              return result;
13125            }
13126    
13127            public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.INodeReference buildPartial() {
13128              org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.INodeReference result = new org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.INodeReference(this);
13129              int from_bitField0_ = bitField0_;
13130              int to_bitField0_ = 0;
13131              if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
13132                to_bitField0_ |= 0x00000001;
13133              }
13134              result.referredId_ = referredId_;
13135              if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
13136                to_bitField0_ |= 0x00000002;
13137              }
13138              result.name_ = name_;
13139              if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
13140                to_bitField0_ |= 0x00000004;
13141              }
13142              result.dstSnapshotId_ = dstSnapshotId_;
13143              if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
13144                to_bitField0_ |= 0x00000008;
13145              }
13146              result.lastSnapshotId_ = lastSnapshotId_;
13147              result.bitField0_ = to_bitField0_;
13148              onBuilt();
13149              return result;
13150            }
13151    
13152            public Builder mergeFrom(com.google.protobuf.Message other) {
13153              if (other instanceof org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.INodeReference) {
13154                return mergeFrom((org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.INodeReference)other);
13155              } else {
13156                super.mergeFrom(other);
13157                return this;
13158              }
13159            }
13160    
13161            public Builder mergeFrom(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.INodeReference other) {
13162              if (other == org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.INodeReference.getDefaultInstance()) return this;
13163              if (other.hasReferredId()) {
13164                setReferredId(other.getReferredId());
13165              }
13166              if (other.hasName()) {
13167                setName(other.getName());
13168              }
13169              if (other.hasDstSnapshotId()) {
13170                setDstSnapshotId(other.getDstSnapshotId());
13171              }
13172              if (other.hasLastSnapshotId()) {
13173                setLastSnapshotId(other.getLastSnapshotId());
13174              }
13175              this.mergeUnknownFields(other.getUnknownFields());
13176              return this;
13177            }
13178    
13179            public final boolean isInitialized() {
13180              return true;
13181            }
13182    
13183            public Builder mergeFrom(
13184                com.google.protobuf.CodedInputStream input,
13185                com.google.protobuf.ExtensionRegistryLite extensionRegistry)
13186                throws java.io.IOException {
13187              org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.INodeReference parsedMessage = null;
13188              try {
13189                parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
13190              } catch (com.google.protobuf.InvalidProtocolBufferException e) {
13191                parsedMessage = (org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.INodeReference) e.getUnfinishedMessage();
13192                throw e;
13193              } finally {
13194                if (parsedMessage != null) {
13195                  mergeFrom(parsedMessage);
13196                }
13197              }
13198              return this;
13199            }
13200            private int bitField0_;
13201    
13202            // optional uint64 referredId = 1;
13203            private long referredId_ ;
13204            /**
13205             * <code>optional uint64 referredId = 1;</code>
13206             *
13207             * <pre>
13208             * id of the referred inode
13209             * </pre>
13210             */
13211            public boolean hasReferredId() {
13212              return ((bitField0_ & 0x00000001) == 0x00000001);
13213            }
13214            /**
13215             * <code>optional uint64 referredId = 1;</code>
13216             *
13217             * <pre>
13218             * id of the referred inode
13219             * </pre>
13220             */
13221            public long getReferredId() {
13222              return referredId_;
13223            }
13224            /**
13225             * <code>optional uint64 referredId = 1;</code>
13226             *
13227             * <pre>
13228             * id of the referred inode
13229             * </pre>
13230             */
13231            public Builder setReferredId(long value) {
13232              bitField0_ |= 0x00000001;
13233              referredId_ = value;
13234              onChanged();
13235              return this;
13236            }
13237            /**
13238             * <code>optional uint64 referredId = 1;</code>
13239             *
13240             * <pre>
13241             * id of the referred inode
13242             * </pre>
13243             */
13244            public Builder clearReferredId() {
13245              bitField0_ = (bitField0_ & ~0x00000001);
13246              referredId_ = 0L;
13247              onChanged();
13248              return this;
13249            }
13250    
13251            // optional bytes name = 2;
13252            private com.google.protobuf.ByteString name_ = com.google.protobuf.ByteString.EMPTY;
13253            /**
13254             * <code>optional bytes name = 2;</code>
13255             *
13256             * <pre>
13257             * local name recorded in WithName
13258             * </pre>
13259             */
13260            public boolean hasName() {
13261              return ((bitField0_ & 0x00000002) == 0x00000002);
13262            }
13263            /**
13264             * <code>optional bytes name = 2;</code>
13265             *
13266             * <pre>
13267             * local name recorded in WithName
13268             * </pre>
13269             */
13270            public com.google.protobuf.ByteString getName() {
13271              return name_;
13272            }
13273            /**
13274             * <code>optional bytes name = 2;</code>
13275             *
13276             * <pre>
13277             * local name recorded in WithName
13278             * </pre>
13279             */
13280            public Builder setName(com.google.protobuf.ByteString value) {
13281              if (value == null) {
13282        throw new NullPointerException();
13283      }
13284      bitField0_ |= 0x00000002;
13285              name_ = value;
13286              onChanged();
13287              return this;
13288            }
13289            /**
13290             * <code>optional bytes name = 2;</code>
13291             *
13292             * <pre>
13293             * local name recorded in WithName
13294             * </pre>
13295             */
13296            public Builder clearName() {
13297              bitField0_ = (bitField0_ & ~0x00000002);
13298              name_ = getDefaultInstance().getName();
13299              onChanged();
13300              return this;
13301            }
13302    
13303            // optional uint32 dstSnapshotId = 3;
13304            private int dstSnapshotId_ ;
13305            /**
13306             * <code>optional uint32 dstSnapshotId = 3;</code>
13307             *
13308             * <pre>
13309             * recorded in DstReference
13310             * </pre>
13311             */
13312            public boolean hasDstSnapshotId() {
13313              return ((bitField0_ & 0x00000004) == 0x00000004);
13314            }
13315            /**
13316             * <code>optional uint32 dstSnapshotId = 3;</code>
13317             *
13318             * <pre>
13319             * recorded in DstReference
13320             * </pre>
13321             */
13322            public int getDstSnapshotId() {
13323              return dstSnapshotId_;
13324            }
13325            /**
13326             * <code>optional uint32 dstSnapshotId = 3;</code>
13327             *
13328             * <pre>
13329             * recorded in DstReference
13330             * </pre>
13331             */
13332            public Builder setDstSnapshotId(int value) {
13333              bitField0_ |= 0x00000004;
13334              dstSnapshotId_ = value;
13335              onChanged();
13336              return this;
13337            }
13338            /**
13339             * <code>optional uint32 dstSnapshotId = 3;</code>
13340             *
13341             * <pre>
13342             * recorded in DstReference
13343             * </pre>
13344             */
13345            public Builder clearDstSnapshotId() {
13346              bitField0_ = (bitField0_ & ~0x00000004);
13347              dstSnapshotId_ = 0;
13348              onChanged();
13349              return this;
13350            }
13351    
13352            // optional uint32 lastSnapshotId = 4;
13353            private int lastSnapshotId_ ;
13354            /**
13355             * <code>optional uint32 lastSnapshotId = 4;</code>
13356             *
13357             * <pre>
13358             * recorded in WithName
13359             * </pre>
13360             */
13361            public boolean hasLastSnapshotId() {
13362              return ((bitField0_ & 0x00000008) == 0x00000008);
13363            }
13364            /**
13365             * <code>optional uint32 lastSnapshotId = 4;</code>
13366             *
13367             * <pre>
13368             * recorded in WithName
13369             * </pre>
13370             */
13371            public int getLastSnapshotId() {
13372              return lastSnapshotId_;
13373            }
13374            /**
13375             * <code>optional uint32 lastSnapshotId = 4;</code>
13376             *
13377             * <pre>
13378             * recorded in WithName
13379             * </pre>
13380             */
13381            public Builder setLastSnapshotId(int value) {
13382              bitField0_ |= 0x00000008;
13383              lastSnapshotId_ = value;
13384              onChanged();
13385              return this;
13386            }
13387            /**
13388             * <code>optional uint32 lastSnapshotId = 4;</code>
13389             *
13390             * <pre>
13391             * recorded in WithName
13392             * </pre>
13393             */
13394            public Builder clearLastSnapshotId() {
13395              bitField0_ = (bitField0_ & ~0x00000008);
13396              lastSnapshotId_ = 0;
13397              onChanged();
13398              return this;
13399            }
13400    
13401            // @@protoc_insertion_point(builder_scope:hadoop.hdfs.fsimage.INodeReferenceSection.INodeReference)
13402          }
13403    
        // Eagerly creates the type's singleton default (empty) instance at
        // class-load time; getDefaultInstance() and the parser rely on it.
        static {
          defaultInstance = new INodeReference(true);
          defaultInstance.initFields();
        }
13408    
13409          // @@protoc_insertion_point(class_scope:hadoop.hdfs.fsimage.INodeReferenceSection.INodeReference)
13410        }
13411    
      // Nothing to initialize: the section message declares no fields here.
      private void initFields() {
      }
      // Tri-state cache: -1 = not yet checked, 0 = invalid, 1 = valid.
      private byte memoizedIsInitialized = -1;
      // Always true: no required fields are declared on this message.
      public final boolean isInitialized() {
        byte isInitialized = memoizedIsInitialized;
        if (isInitialized != -1) return isInitialized == 1;

        memoizedIsInitialized = 1;
        return true;
      }
13422    
      // Serializes this message. The section declares no fields of its own
      // at this level, so only preserved unknown fields are written.
      // NOTE(review): the INodeReference entries appear to be serialized
      // separately by the fsimage writer, not as part of this message —
      // confirm against the fsimage save/load code.
      public void writeTo(com.google.protobuf.CodedOutputStream output)
                          throws java.io.IOException {
        getSerializedSize();
        getUnknownFields().writeTo(output);
      }
13428    
      // Cached wire size; -1 until first computed. The unsynchronized
      // read/write is a benign race: recomputation is idempotent.
      private int memoizedSerializedSize = -1;
      // Size is just the unknown-field payload: no declared fields here.
      public int getSerializedSize() {
        int size = memoizedSerializedSize;
        if (size != -1) return size;

        size = 0;
        size += getUnknownFields().getSerializedSize();
        memoizedSerializedSize = size;
        return size;
      }
13439    
      private static final long serialVersionUID = 0L;
      // Java-serialization hook; delegates to the GeneratedMessage
      // superclass implementation.
      @java.lang.Override
      protected java.lang.Object writeReplace()
          throws java.io.ObjectStreamException {
        return super.writeReplace();
      }
13446    
      // -----------------------------------------------------------------
      // Static parse entry points. All delegate to PARSER and differ only
      // in the input source and whether an extension registry is supplied.
      // The "Delimited" variants read a varint length prefix first.
      // -----------------------------------------------------------------
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection parseFrom(
          com.google.protobuf.ByteString data)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection parseFrom(
          com.google.protobuf.ByteString data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection parseFrom(byte[] data)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection parseFrom(
          byte[] data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection parseFrom(java.io.InputStream input)
          throws java.io.IOException {
        return PARSER.parseFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection parseFrom(
          java.io.InputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseFrom(input, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection parseDelimitedFrom(java.io.InputStream input)
          throws java.io.IOException {
        return PARSER.parseDelimitedFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection parseDelimitedFrom(
          java.io.InputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseDelimitedFrom(input, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection parseFrom(
          com.google.protobuf.CodedInputStream input)
          throws java.io.IOException {
        return PARSER.parseFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection parseFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseFrom(input, extensionRegistry);
      }
13499    
      /** Creates a fresh builder for this section type. */
      public static Builder newBuilder() { return Builder.create(); }
      public Builder newBuilderForType() { return newBuilder(); }
      /** Creates a builder pre-populated by merging {@code prototype}. */
      public static Builder newBuilder(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection prototype) {
        return newBuilder().mergeFrom(prototype);
      }
      public Builder toBuilder() { return newBuilder(this); }

      // Internal factory used by the runtime to create a child builder
      // attached to a parent (for nested-builder change notification).
      @java.lang.Override
      protected Builder newBuilderForType(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        Builder builder = new Builder(parent);
        return builder;
      }
13513        /**
13514         * Protobuf type {@code hadoop.hdfs.fsimage.INodeReferenceSection}
13515         */
13516        public static final class Builder extends
13517            com.google.protobuf.GeneratedMessage.Builder<Builder>
13518           implements org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSectionOrBuilder {
13519          public static final com.google.protobuf.Descriptors.Descriptor
13520              getDescriptor() {
13521            return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeReferenceSection_descriptor;
13522          }
13523    
13524          protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
13525              internalGetFieldAccessorTable() {
13526            return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeReferenceSection_fieldAccessorTable
13527                .ensureFieldAccessorsInitialized(
13528                    org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.Builder.class);
13529          }
13530    
13531          // Construct using org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.newBuilder()
13532          private Builder() {
13533            maybeForceBuilderInitialization();
13534          }
13535    
13536          private Builder(
13537              com.google.protobuf.GeneratedMessage.BuilderParent parent) {
13538            super(parent);
13539            maybeForceBuilderInitialization();
13540          }
13541          private void maybeForceBuilderInitialization() {
13542            if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
13543            }
13544          }
13545          private static Builder create() {
13546            return new Builder();
13547          }
13548    
13549          public Builder clear() {
13550            super.clear();
13551            return this;
13552          }
13553    
13554          public Builder clone() {
13555            return create().mergeFrom(buildPartial());
13556          }
13557    
13558          public com.google.protobuf.Descriptors.Descriptor
13559              getDescriptorForType() {
13560            return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeReferenceSection_descriptor;
13561          }
13562    
13563          public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection getDefaultInstanceForType() {
13564            return org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.getDefaultInstance();
13565          }
13566    
13567          public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection build() {
13568            org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection result = buildPartial();
13569            if (!result.isInitialized()) {
13570              throw newUninitializedMessageException(result);
13571            }
13572            return result;
13573          }
13574    
13575          public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection buildPartial() {
13576            org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection result = new org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection(this);
13577            onBuilt();
13578            return result;
13579          }
13580    
13581          public Builder mergeFrom(com.google.protobuf.Message other) {
13582            if (other instanceof org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection) {
13583              return mergeFrom((org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection)other);
13584            } else {
13585              super.mergeFrom(other);
13586              return this;
13587            }
13588          }
13589    
13590          public Builder mergeFrom(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection other) {
13591            if (other == org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.getDefaultInstance()) return this;
13592            this.mergeUnknownFields(other.getUnknownFields());
13593            return this;
13594          }
13595    
13596          public final boolean isInitialized() {
13597            return true;
13598          }
13599    
13600          public Builder mergeFrom(
13601              com.google.protobuf.CodedInputStream input,
13602              com.google.protobuf.ExtensionRegistryLite extensionRegistry)
13603              throws java.io.IOException {
13604            org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection parsedMessage = null;
13605            try {
13606              parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
13607            } catch (com.google.protobuf.InvalidProtocolBufferException e) {
13608              parsedMessage = (org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection) e.getUnfinishedMessage();
13609              throw e;
13610            } finally {
13611              if (parsedMessage != null) {
13612                mergeFrom(parsedMessage);
13613              }
13614            }
13615            return this;
13616          }
13617    
13618          // @@protoc_insertion_point(builder_scope:hadoop.hdfs.fsimage.INodeReferenceSection)
13619        }
13620    
      // Eagerly creates the singleton default (empty) instance at class-load
      // time; getDefaultInstance() and the parser rely on it.
      static {
        defaultInstance = new INodeReferenceSection(true);
        defaultInstance.initFields();
      }
13625    
13626        // @@protoc_insertion_point(class_scope:hadoop.hdfs.fsimage.INodeReferenceSection)
13627      }
13628    
  /**
   * Read-only accessor contract implemented by both {@code SnapshotSection}
   * and its {@code Builder}.
   */
  public interface SnapshotSectionOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // optional uint32 snapshotCounter = 1;
    /**
     * <code>optional uint32 snapshotCounter = 1;</code>
     */
    boolean hasSnapshotCounter();
    /**
     * <code>optional uint32 snapshotCounter = 1;</code>
     */
    int getSnapshotCounter();

    // repeated uint64 snapshottableDir = 2 [packed = true];
    /**
     * <code>repeated uint64 snapshottableDir = 2 [packed = true];</code>
     */
    java.util.List<java.lang.Long> getSnapshottableDirList();
    /**
     * <code>repeated uint64 snapshottableDir = 2 [packed = true];</code>
     */
    int getSnapshottableDirCount();
    /**
     * <code>repeated uint64 snapshottableDir = 2 [packed = true];</code>
     */
    long getSnapshottableDir(int index);

    // optional uint32 numSnapshots = 3;
    /**
     * <code>optional uint32 numSnapshots = 3;</code>
     *
     * <pre>
     * total number of snapshots
     * </pre>
     */
    boolean hasNumSnapshots();
    /**
     * <code>optional uint32 numSnapshots = 3;</code>
     *
     * <pre>
     * total number of snapshots
     * </pre>
     */
    int getNumSnapshots();
  }
13674      /**
13675       * Protobuf type {@code hadoop.hdfs.fsimage.SnapshotSection}
13676       *
13677       * <pre>
13678       **
13679       * This section records the information about snapshot
13680       * NAME: SNAPSHOT
13681       * </pre>
13682       */
13683      public static final class SnapshotSection extends
13684          com.google.protobuf.GeneratedMessage
13685          implements SnapshotSectionOrBuilder {
    // Use SnapshotSection.newBuilder() to construct.
    private SnapshotSection(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // No-init path used only for the singleton default instance.
    private SnapshotSection(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    // Singleton default instance, assigned in the class static initializer.
    private static final SnapshotSection defaultInstance;
    public static SnapshotSection getDefaultInstance() {
      return defaultInstance;
    }

    public SnapshotSection getDefaultInstanceForType() {
      return defaultInstance;
    }

    // Fields that arrived on the wire but are not declared in the schema.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    /**
     * Parses a serialized SnapshotSection from {@code input}. Unrecognized
     * fields are preserved in {@link #getUnknownFields()}. Invoked via
     * {@link #PARSER}.
     */
    private SnapshotSection(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:  // tag 0 signals end of input
              done = true;
              break;
            default: {
              // Unknown tag: stash the field, or stop when parseUnknownField
              // returns false.
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 8: {  // field 1 (snapshotCounter), varint
              bitField0_ |= 0x00000001;
              snapshotCounter_ = input.readUInt32();
              break;
            }
            case 16: {  // field 2 (snapshottableDir), single unpacked varint
              if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
                snapshottableDir_ = new java.util.ArrayList<java.lang.Long>();
                mutable_bitField0_ |= 0x00000002;
              }
              snapshottableDir_.add(input.readUInt64());
              break;
            }
            case 18: {  // field 2 (snapshottableDir), packed: length-prefixed run of varints
              int length = input.readRawVarint32();
              int limit = input.pushLimit(length);
              if (!((mutable_bitField0_ & 0x00000002) == 0x00000002) && input.getBytesUntilLimit() > 0) {
                snapshottableDir_ = new java.util.ArrayList<java.lang.Long>();
                mutable_bitField0_ |= 0x00000002;
              }
              while (input.getBytesUntilLimit() > 0) {
                snapshottableDir_.add(input.readUInt64());
              }
              input.popLimit(limit);
              break;
            }
            case 24: {  // field 3 (numSnapshots), varint
              bitField0_ |= 0x00000002;
              numSnapshots_ = input.readUInt32();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Freeze the repeated field and unknown-field set even on failure, so
        // the partially-parsed message attached to the exception is safe.
        if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
          snapshottableDir_ = java.util.Collections.unmodifiableList(snapshottableDir_);
        }
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    /** Descriptor for this message type, from fsimage.proto. */
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SnapshotSection_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SnapshotSection_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.Builder.class);
    }

    // Parser that delegates to the parsing constructor above.
    public static com.google.protobuf.Parser<SnapshotSection> PARSER =
        new com.google.protobuf.AbstractParser<SnapshotSection>() {
      public SnapshotSection parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new SnapshotSection(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<SnapshotSection> getParserForType() {
      return PARSER;
    }
13803    
    /**
     * Accessor interface for {@code hadoop.hdfs.fsimage.SnapshotSection.Snapshot},
     * implemented by both the immutable message and its Builder.
     */
    public interface SnapshotOrBuilder
        extends com.google.protobuf.MessageOrBuilder {

      // optional uint32 snapshotId = 1;
      /**
       * <code>optional uint32 snapshotId = 1;</code>
       */
      boolean hasSnapshotId();
      /**
       * <code>optional uint32 snapshotId = 1;</code>
       */
      int getSnapshotId();

      // optional .hadoop.hdfs.fsimage.INodeSection.INode root = 2;
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.INode root = 2;</code>
       *
       * <pre>
       * Snapshot root
       * </pre>
       */
      boolean hasRoot();
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.INode root = 2;</code>
       *
       * <pre>
       * Snapshot root
       * </pre>
       */
      org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode getRoot();
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.INode root = 2;</code>
       *
       * <pre>
       * Snapshot root
       * </pre>
       */
      org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeOrBuilder getRootOrBuilder();
    }
13843        /**
13844         * Protobuf type {@code hadoop.hdfs.fsimage.SnapshotSection.Snapshot}
13845         */
13846        public static final class Snapshot extends
13847            com.google.protobuf.GeneratedMessage
13848            implements SnapshotOrBuilder {
      // Use Snapshot.newBuilder() to construct.
      private Snapshot(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
        super(builder);
        this.unknownFields = builder.getUnknownFields();
      }
      // No-init path used only for the singleton default instance.
      private Snapshot(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

      // Singleton default instance, assigned in the class static initializer.
      private static final Snapshot defaultInstance;
      public static Snapshot getDefaultInstance() {
        return defaultInstance;
      }

      public Snapshot getDefaultInstanceForType() {
        return defaultInstance;
      }

      // Fields that arrived on the wire but are not declared in the schema.
      private final com.google.protobuf.UnknownFieldSet unknownFields;
      @java.lang.Override
      public final com.google.protobuf.UnknownFieldSet
          getUnknownFields() {
        return this.unknownFields;
      }
      /**
       * Parses a serialized Snapshot from {@code input}. Unrecognized fields
       * are preserved in {@link #getUnknownFields()}. Invoked via
       * {@link #PARSER}.
       */
      private Snapshot(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        initFields();
        int mutable_bitField0_ = 0;
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
            com.google.protobuf.UnknownFieldSet.newBuilder();
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:  // tag 0 signals end of input
                done = true;
                break;
              default: {
                // Unknown tag: stash the field, or stop when parseUnknownField
                // returns false.
                if (!parseUnknownField(input, unknownFields,
                                       extensionRegistry, tag)) {
                  done = true;
                }
                break;
              }
              case 8: {  // field 1 (snapshotId), varint
                bitField0_ |= 0x00000001;
                snapshotId_ = input.readUInt32();
                break;
              }
              case 18: {  // field 2 (root), length-delimited sub-message
                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode.Builder subBuilder = null;
                if (((bitField0_ & 0x00000002) == 0x00000002)) {
                  // Field appeared before: merge the new occurrence into it.
                  subBuilder = root_.toBuilder();
                }
                root_ = input.readMessage(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode.PARSER, extensionRegistry);
                if (subBuilder != null) {
                  subBuilder.mergeFrom(root_);
                  root_ = subBuilder.buildPartial();
                }
                bitField0_ |= 0x00000002;
                break;
              }
            }
          }
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(this);
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(
              e.getMessage()).setUnfinishedMessage(this);
        } finally {
          // Always freeze unknown fields so the partially-parsed message
          // attached to the exception is safe.
          this.unknownFields = unknownFields.build();
          makeExtensionsImmutable();
        }
      }
      /** Descriptor for this message type, from fsimage.proto. */
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SnapshotSection_Snapshot_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SnapshotSection_Snapshot_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.Snapshot.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.Snapshot.Builder.class);
      }

      // Parser that delegates to the parsing constructor above.
      public static com.google.protobuf.Parser<Snapshot> PARSER =
          new com.google.protobuf.AbstractParser<Snapshot>() {
        public Snapshot parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          return new Snapshot(input, extensionRegistry);
        }
      };

      @java.lang.Override
      public com.google.protobuf.Parser<Snapshot> getParserForType() {
        return PARSER;
      }
13950    
      // Has-bits for optional fields: bit 0 = snapshotId, bit 1 = root.
      private int bitField0_;
      // optional uint32 snapshotId = 1;
      public static final int SNAPSHOTID_FIELD_NUMBER = 1;
      private int snapshotId_;
      /**
       * <code>optional uint32 snapshotId = 1;</code>
       */
      public boolean hasSnapshotId() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>optional uint32 snapshotId = 1;</code>
       */
      public int getSnapshotId() {
        return snapshotId_;
      }

      // optional .hadoop.hdfs.fsimage.INodeSection.INode root = 2;
      public static final int ROOT_FIELD_NUMBER = 2;
      private org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode root_;
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.INode root = 2;</code>
       *
       * <pre>
       * Snapshot root
       * </pre>
       */
      public boolean hasRoot() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.INode root = 2;</code>
       *
       * <pre>
       * Snapshot root
       * </pre>
       */
      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode getRoot() {
        return root_;
      }
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.INode root = 2;</code>
       *
       * <pre>
       * Snapshot root
       * </pre>
       */
      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeOrBuilder getRootOrBuilder() {
        return root_;
      }
14001    
      // Assigns proto-declared defaults; called from the parsing constructor
      // and the default-instance path.
      private void initFields() {
        snapshotId_ = 0;
        root_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode.getDefaultInstance();
      }
      // Memoized result: -1 = not yet computed, 0 = false, 1 = true.
      private byte memoizedIsInitialized = -1;
      public final boolean isInitialized() {
        byte isInitialized = memoizedIsInitialized;
        if (isInitialized != -1) return isInitialized == 1;

        // root is optional, but when present it must itself be initialized.
        if (hasRoot()) {
          if (!getRoot().isInitialized()) {
            memoizedIsInitialized = 0;
            return false;
          }
        }
        memoizedIsInitialized = 1;
        return true;
      }
14020    
      /** Serializes the set fields (plus unknown fields) to {@code output}. */
      public void writeTo(com.google.protobuf.CodedOutputStream output)
                          throws java.io.IOException {
        getSerializedSize();  // populates the memoized size used by the writer
        if (((bitField0_ & 0x00000001) == 0x00000001)) {
          output.writeUInt32(1, snapshotId_);
        }
        if (((bitField0_ & 0x00000002) == 0x00000002)) {
          output.writeMessage(2, root_);
        }
        getUnknownFields().writeTo(output);
      }

      // Memoized serialized size: -1 = not yet computed.
      private int memoizedSerializedSize = -1;
      /** Returns (and caches) the exact byte size {@link #writeTo} will emit. */
      public int getSerializedSize() {
        int size = memoizedSerializedSize;
        if (size != -1) return size;

        size = 0;
        if (((bitField0_ & 0x00000001) == 0x00000001)) {
          size += com.google.protobuf.CodedOutputStream
            .computeUInt32Size(1, snapshotId_);
        }
        if (((bitField0_ & 0x00000002) == 0x00000002)) {
          size += com.google.protobuf.CodedOutputStream
            .computeMessageSize(2, root_);
        }
        size += getUnknownFields().getSerializedSize();
        memoizedSerializedSize = size;
        return size;
      }
14051    
      private static final long serialVersionUID = 0L;
      // Java serialization is delegated to the GeneratedMessage superclass.
      @java.lang.Override
      protected java.lang.Object writeReplace()
          throws java.io.ObjectStreamException {
        return super.writeReplace();
      }
14058    
      // Static parse entry points; every overload delegates to PARSER.
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.Snapshot parseFrom(
          com.google.protobuf.ByteString data)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.Snapshot parseFrom(
          com.google.protobuf.ByteString data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.Snapshot parseFrom(byte[] data)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.Snapshot parseFrom(
          byte[] data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.Snapshot parseFrom(java.io.InputStream input)
          throws java.io.IOException {
        return PARSER.parseFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.Snapshot parseFrom(
          java.io.InputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseFrom(input, extensionRegistry);
      }
      // Delimited variants read a leading varint length before the message.
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.Snapshot parseDelimitedFrom(java.io.InputStream input)
          throws java.io.IOException {
        return PARSER.parseDelimitedFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.Snapshot parseDelimitedFrom(
          java.io.InputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseDelimitedFrom(input, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.Snapshot parseFrom(
          com.google.protobuf.CodedInputStream input)
          throws java.io.IOException {
        return PARSER.parseFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.Snapshot parseFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseFrom(input, extensionRegistry);
      }
14111    
      // Builder factory methods.
      public static Builder newBuilder() { return Builder.create(); }
      public Builder newBuilderForType() { return newBuilder(); }
      /** Returns a new Builder pre-populated from {@code prototype}. */
      public static Builder newBuilder(org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.Snapshot prototype) {
        return newBuilder().mergeFrom(prototype);
      }
      public Builder toBuilder() { return newBuilder(this); }

      @java.lang.Override
      protected Builder newBuilderForType(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        Builder builder = new Builder(parent);
        return builder;
      }
14125          /**
14126           * Protobuf type {@code hadoop.hdfs.fsimage.SnapshotSection.Snapshot}
14127           */
14128          public static final class Builder extends
14129              com.google.protobuf.GeneratedMessage.Builder<Builder>
14130             implements org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.SnapshotOrBuilder {
        /** Descriptor for this message type, from fsimage.proto. */
        public static final com.google.protobuf.Descriptors.Descriptor
            getDescriptor() {
          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SnapshotSection_Snapshot_descriptor;
        }

        protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
            internalGetFieldAccessorTable() {
          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SnapshotSection_Snapshot_fieldAccessorTable
              .ensureFieldAccessorsInitialized(
                  org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.Snapshot.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.Snapshot.Builder.class);
        }

        // Construct using org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.Snapshot.newBuilder()
        private Builder() {
          maybeForceBuilderInitialization();
        }

        private Builder(
            com.google.protobuf.GeneratedMessage.BuilderParent parent) {
          super(parent);
          maybeForceBuilderInitialization();
        }
        // Pre-creates nested field builders when the runtime requires them.
        private void maybeForceBuilderInitialization() {
          if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
            getRootFieldBuilder();
          }
        }
        private static Builder create() {
          return new Builder();
        }
14161    
        /** Resets all fields to their proto-declared defaults. */
        public Builder clear() {
          super.clear();
          snapshotId_ = 0;
          bitField0_ = (bitField0_ & ~0x00000001);
          if (rootBuilder_ == null) {
            root_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode.getDefaultInstance();
          } else {
            rootBuilder_.clear();
          }
          bitField0_ = (bitField0_ & ~0x00000002);
          return this;
        }

        /** Deep copy via buildPartial + mergeFrom. */
        public Builder clone() {
          return create().mergeFrom(buildPartial());
        }

        public com.google.protobuf.Descriptors.Descriptor
            getDescriptorForType() {
          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SnapshotSection_Snapshot_descriptor;
        }

        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.Snapshot getDefaultInstanceForType() {
          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.Snapshot.getDefaultInstance();
        }

        /** Builds the message, rejecting an uninitialized result. */
        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.Snapshot build() {
          org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.Snapshot result = buildPartial();
          if (!result.isInitialized()) {
            throw newUninitializedMessageException(result);
          }
          return result;
        }
14195    
        /**
         * Builds the message without checking isInitialized(), copying the
         * builder's has-bits into the message's bitField0_.
         */
        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.Snapshot buildPartial() {
          org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.Snapshot result = new org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.Snapshot(this);
          int from_bitField0_ = bitField0_;
          int to_bitField0_ = 0;
          if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
            to_bitField0_ |= 0x00000001;
          }
          result.snapshotId_ = snapshotId_;
          if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
            to_bitField0_ |= 0x00000002;
          }
          // root comes from the nested builder when one has been created.
          if (rootBuilder_ == null) {
            result.root_ = root_;
          } else {
            result.root_ = rootBuilder_.build();
          }
          result.bitField0_ = to_bitField0_;
          onBuilt();
          return result;
        }
14216    
        // Dynamic-dispatch merge: routes to the typed overload when possible.
        public Builder mergeFrom(com.google.protobuf.Message other) {
          if (other instanceof org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.Snapshot) {
            return mergeFrom((org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.Snapshot)other);
          } else {
            super.mergeFrom(other);
            return this;
          }
        }

        // Field-by-field merge; only fields set on `other` are copied.
        public Builder mergeFrom(org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.Snapshot other) {
          if (other == org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.Snapshot.getDefaultInstance()) return this;
          if (other.hasSnapshotId()) {
            setSnapshotId(other.getSnapshotId());
          }
          if (other.hasRoot()) {
            mergeRoot(other.getRoot());
          }
          this.mergeUnknownFields(other.getUnknownFields());
          return this;
        }

        // Builder variant does not memoize; root must be initialized if set.
        public final boolean isInitialized() {
          if (hasRoot()) {
            if (!getRoot().isInitialized()) {
              
              return false;
            }
          }
          return true;
        }
14247    
        /**
         * Parses from {@code input} and merges into this builder. On parse
         * failure the partially-parsed message attached to the exception is
         * still merged before rethrowing, so prior data is not lost.
         */
        public Builder mergeFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws java.io.IOException {
          org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.Snapshot parsedMessage = null;
          try {
            parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            parsedMessage = (org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.Snapshot) e.getUnfinishedMessage();
            throw e;
          } finally {
            if (parsedMessage != null) {
              mergeFrom(parsedMessage);
            }
          }
          return this;
        }
        // Has-bits for optional fields: bit 0 = snapshotId, bit 1 = root.
        private int bitField0_;

        // optional uint32 snapshotId = 1;
        private int snapshotId_ ;
        /**
         * <code>optional uint32 snapshotId = 1;</code>
         */
        public boolean hasSnapshotId() {
          return ((bitField0_ & 0x00000001) == 0x00000001);
        }
        /**
         * <code>optional uint32 snapshotId = 1;</code>
         */
        public int getSnapshotId() {
          return snapshotId_;
        }
        /**
         * <code>optional uint32 snapshotId = 1;</code>
         */
        public Builder setSnapshotId(int value) {
          bitField0_ |= 0x00000001;
          snapshotId_ = value;
          onChanged();
          return this;
        }
        /**
         * <code>optional uint32 snapshotId = 1;</code>
         */
        public Builder clearSnapshotId() {
          bitField0_ = (bitField0_ & ~0x00000001);
          snapshotId_ = 0;
          onChanged();
          return this;
        }
14299    
14300            // optional .hadoop.hdfs.fsimage.INodeSection.INode root = 2;
14301            private org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode root_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode.getDefaultInstance();
14302            private com.google.protobuf.SingleFieldBuilder<
14303                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeOrBuilder> rootBuilder_;
14304            /**
14305             * <code>optional .hadoop.hdfs.fsimage.INodeSection.INode root = 2;</code>
14306             *
14307             * <pre>
14308             * Snapshot root
14309             * </pre>
14310             */
14311            public boolean hasRoot() {
14312              return ((bitField0_ & 0x00000002) == 0x00000002);
14313            }
14314            /**
14315             * <code>optional .hadoop.hdfs.fsimage.INodeSection.INode root = 2;</code>
14316             *
14317             * <pre>
14318             * Snapshot root
14319             * </pre>
14320             */
14321            public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode getRoot() {
14322              if (rootBuilder_ == null) {
14323                return root_;
14324              } else {
14325                return rootBuilder_.getMessage();
14326              }
14327            }
14328            /**
14329             * <code>optional .hadoop.hdfs.fsimage.INodeSection.INode root = 2;</code>
14330             *
14331             * <pre>
14332             * Snapshot root
14333             * </pre>
14334             */
14335            public Builder setRoot(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode value) {
14336              if (rootBuilder_ == null) {
14337                if (value == null) {
14338                  throw new NullPointerException();
14339                }
14340                root_ = value;
14341                onChanged();
14342              } else {
14343                rootBuilder_.setMessage(value);
14344              }
14345              bitField0_ |= 0x00000002;
14346              return this;
14347            }
14348            /**
14349             * <code>optional .hadoop.hdfs.fsimage.INodeSection.INode root = 2;</code>
14350             *
14351             * <pre>
14352             * Snapshot root
14353             * </pre>
14354             */
14355            public Builder setRoot(
14356                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode.Builder builderForValue) {
14357              if (rootBuilder_ == null) {
14358                root_ = builderForValue.build();
14359                onChanged();
14360              } else {
14361                rootBuilder_.setMessage(builderForValue.build());
14362              }
14363              bitField0_ |= 0x00000002;
14364              return this;
14365            }
14366            /**
14367             * <code>optional .hadoop.hdfs.fsimage.INodeSection.INode root = 2;</code>
14368             *
14369             * <pre>
14370             * Snapshot root
14371             * </pre>
14372             */
14373            public Builder mergeRoot(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode value) {
14374              if (rootBuilder_ == null) {
14375                if (((bitField0_ & 0x00000002) == 0x00000002) &&
14376                    root_ != org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode.getDefaultInstance()) {
14377                  root_ =
14378                    org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode.newBuilder(root_).mergeFrom(value).buildPartial();
14379                } else {
14380                  root_ = value;
14381                }
14382                onChanged();
14383              } else {
14384                rootBuilder_.mergeFrom(value);
14385              }
14386              bitField0_ |= 0x00000002;
14387              return this;
14388            }
14389            /**
14390             * <code>optional .hadoop.hdfs.fsimage.INodeSection.INode root = 2;</code>
14391             *
14392             * <pre>
14393             * Snapshot root
14394             * </pre>
14395             */
14396            public Builder clearRoot() {
14397              if (rootBuilder_ == null) {
14398                root_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode.getDefaultInstance();
14399                onChanged();
14400              } else {
14401                rootBuilder_.clear();
14402              }
14403              bitField0_ = (bitField0_ & ~0x00000002);
14404              return this;
14405            }
14406            /**
14407             * <code>optional .hadoop.hdfs.fsimage.INodeSection.INode root = 2;</code>
14408             *
14409             * <pre>
14410             * Snapshot root
14411             * </pre>
14412             */
14413            public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode.Builder getRootBuilder() {
14414              bitField0_ |= 0x00000002;
14415              onChanged();
14416              return getRootFieldBuilder().getBuilder();
14417            }
14418            /**
14419             * <code>optional .hadoop.hdfs.fsimage.INodeSection.INode root = 2;</code>
14420             *
14421             * <pre>
14422             * Snapshot root
14423             * </pre>
14424             */
14425            public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeOrBuilder getRootOrBuilder() {
14426              if (rootBuilder_ != null) {
14427                return rootBuilder_.getMessageOrBuilder();
14428              } else {
14429                return root_;
14430              }
14431            }
14432            /**
14433             * <code>optional .hadoop.hdfs.fsimage.INodeSection.INode root = 2;</code>
14434             *
14435             * <pre>
14436             * Snapshot root
14437             * </pre>
14438             */
14439            private com.google.protobuf.SingleFieldBuilder<
14440                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeOrBuilder> 
14441                getRootFieldBuilder() {
14442              if (rootBuilder_ == null) {
14443                rootBuilder_ = new com.google.protobuf.SingleFieldBuilder<
14444                    org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeOrBuilder>(
14445                        root_,
14446                        getParentForChildren(),
14447                        isClean());
14448                root_ = null;
14449              }
14450              return rootBuilder_;
14451            }
14452    
14453            // @@protoc_insertion_point(builder_scope:hadoop.hdfs.fsimage.SnapshotSection.Snapshot)
14454          }
14455    
        // Eagerly construct the shared default instance for Snapshot.
        static {
          defaultInstance = new Snapshot(true);
          defaultInstance.initFields();
        }
14460    
14461          // @@protoc_insertion_point(class_scope:hadoop.hdfs.fsimage.SnapshotSection.Snapshot)
14462        }
14463    
    // Presence bits for the optional fields of SnapshotSection
    // (0x1 = snapshotCounter, 0x2 = numSnapshots; see buildPartial in Builder).
    private int bitField0_;
    // optional uint32 snapshotCounter = 1;
    public static final int SNAPSHOTCOUNTER_FIELD_NUMBER = 1;
    private int snapshotCounter_;
    /**
     * <code>optional uint32 snapshotCounter = 1;</code>
     */
    public boolean hasSnapshotCounter() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>optional uint32 snapshotCounter = 1;</code>
     */
    public int getSnapshotCounter() {
      return snapshotCounter_;
    }
14480    
    // repeated uint64 snapshottableDir = 2 [packed = true];
    public static final int SNAPSHOTTABLEDIR_FIELD_NUMBER = 2;
    // Immutable once the message is built (set in buildPartial / initFields).
    private java.util.List<java.lang.Long> snapshottableDir_;
    /**
     * <code>repeated uint64 snapshottableDir = 2 [packed = true];</code>
     */
    public java.util.List<java.lang.Long>
        getSnapshottableDirList() {
      return snapshottableDir_;
    }
    /**
     * <code>repeated uint64 snapshottableDir = 2 [packed = true];</code>
     */
    public int getSnapshottableDirCount() {
      return snapshottableDir_.size();
    }
    /**
     * <code>repeated uint64 snapshottableDir = 2 [packed = true];</code>
     */
    public long getSnapshottableDir(int index) {
      return snapshottableDir_.get(index);
    }
    // Byte length of the packed payload, computed by getSerializedSize()
    // and consumed by writeTo(); -1 until first computed.
    private int snapshottableDirMemoizedSerializedSize = -1;
14504    
    // optional uint32 numSnapshots = 3;
    public static final int NUMSNAPSHOTS_FIELD_NUMBER = 3;
    private int numSnapshots_;
    /**
     * <code>optional uint32 numSnapshots = 3;</code>
     *
     * <pre>
     * total number of snapshots
     * </pre>
     */
    public boolean hasNumSnapshots() {
      // Note: in the message class numSnapshots uses bit 0x2 (the builder
      // uses 0x4; buildPartial remaps between the two layouts).
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>optional uint32 numSnapshots = 3;</code>
     *
     * <pre>
     * total number of snapshots
     * </pre>
     */
    public int getNumSnapshots() {
      return numSnapshots_;
    }
14528    
    // Reset all fields to their proto defaults; called by constructors.
    private void initFields() {
      snapshotCounter_ = 0;
      snapshottableDir_ = java.util.Collections.emptyList();
      numSnapshots_ = 0;
    }
    // -1 = unknown, 0 = not initialized, 1 = initialized (memoized).
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      // All fields are optional/repeated, so the message is always initialized.
      memoizedIsInitialized = 1;
      return true;
    }
14542    
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      // Must run first: it memoizes snapshottableDirMemoizedSerializedSize,
      // which the packed-field header below depends on.
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeUInt32(1, snapshotCounter_);
      }
      if (getSnapshottableDirList().size() > 0) {
        // 18 = raw tag for field 2, wire type 2 (length-delimited packed encoding).
        output.writeRawVarint32(18);
        output.writeRawVarint32(snapshottableDirMemoizedSerializedSize);
      }
      for (int i = 0; i < snapshottableDir_.size(); i++) {
        output.writeUInt64NoTag(snapshottableDir_.get(i));
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeUInt32(3, numSnapshots_);
      }
      getUnknownFields().writeTo(output);
    }
14561    
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      // Memoized: safe because the message is immutable after construction.
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt32Size(1, snapshotCounter_);
      }
      {
        // Packed repeated field: payload bytes plus a single tag + length prefix.
        int dataSize = 0;
        for (int i = 0; i < snapshottableDir_.size(); i++) {
          dataSize += com.google.protobuf.CodedOutputStream
            .computeUInt64SizeNoTag(snapshottableDir_.get(i));
        }
        size += dataSize;
        if (!getSnapshottableDirList().isEmpty()) {
          size += 1;
          size += com.google.protobuf.CodedOutputStream
              .computeInt32SizeNoTag(dataSize);
        }
        // Cached for writeTo(), which emits this as the length prefix.
        snapshottableDirMemoizedSerializedSize = dataSize;
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt32Size(3, numSnapshots_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
14594    
    private static final long serialVersionUID = 0L;
    // Java serialization hook; delegates to GeneratedMessage's replacement form.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
14601    
    // Standard generated parse entry points; all delegate to PARSER.
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    // Delimited variants read a varint length prefix before the message bytes.
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
14654    
    // Builder factory methods.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    // Returns a builder pre-populated with the prototype's field values.
    public static Builder newBuilder(org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
14668        /**
14669         * Protobuf type {@code hadoop.hdfs.fsimage.SnapshotSection}
14670         *
14671         * <pre>
14672         **
14673         * This section records the information about snapshot
14674         * NAME: SNAPSHOT
14675         * </pre>
14676         */
14677        public static final class Builder extends
14678            com.google.protobuf.GeneratedMessage.Builder<Builder>
14679           implements org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSectionOrBuilder {
14680          public static final com.google.protobuf.Descriptors.Descriptor
14681              getDescriptor() {
14682            return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SnapshotSection_descriptor;
14683          }
14684    
14685          protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
14686              internalGetFieldAccessorTable() {
14687            return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SnapshotSection_fieldAccessorTable
14688                .ensureFieldAccessorsInitialized(
14689                    org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.Builder.class);
14690          }
14691    
14692          // Construct using org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.newBuilder()
14693          private Builder() {
14694            maybeForceBuilderInitialization();
14695          }
14696    
14697          private Builder(
14698              com.google.protobuf.GeneratedMessage.BuilderParent parent) {
14699            super(parent);
14700            maybeForceBuilderInitialization();
14701          }
14702          private void maybeForceBuilderInitialization() {
14703            if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
14704            }
14705          }
14706          private static Builder create() {
14707            return new Builder();
14708          }
14709    
14710          public Builder clear() {
14711            super.clear();
14712            snapshotCounter_ = 0;
14713            bitField0_ = (bitField0_ & ~0x00000001);
14714            snapshottableDir_ = java.util.Collections.emptyList();
14715            bitField0_ = (bitField0_ & ~0x00000002);
14716            numSnapshots_ = 0;
14717            bitField0_ = (bitField0_ & ~0x00000004);
14718            return this;
14719          }
14720    
14721          public Builder clone() {
14722            return create().mergeFrom(buildPartial());
14723          }
14724    
14725          public com.google.protobuf.Descriptors.Descriptor
14726              getDescriptorForType() {
14727            return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SnapshotSection_descriptor;
14728          }
14729    
14730          public org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection getDefaultInstanceForType() {
14731            return org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.getDefaultInstance();
14732          }
14733    
14734          public org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection build() {
14735            org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection result = buildPartial();
14736            if (!result.isInitialized()) {
14737              throw newUninitializedMessageException(result);
14738            }
14739            return result;
14740          }
14741    
14742          public org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection buildPartial() {
14743            org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection result = new org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection(this);
14744            int from_bitField0_ = bitField0_;
14745            int to_bitField0_ = 0;
14746            if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
14747              to_bitField0_ |= 0x00000001;
14748            }
14749            result.snapshotCounter_ = snapshotCounter_;
14750            if (((bitField0_ & 0x00000002) == 0x00000002)) {
14751              snapshottableDir_ = java.util.Collections.unmodifiableList(snapshottableDir_);
14752              bitField0_ = (bitField0_ & ~0x00000002);
14753            }
14754            result.snapshottableDir_ = snapshottableDir_;
14755            if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
14756              to_bitField0_ |= 0x00000002;
14757            }
14758            result.numSnapshots_ = numSnapshots_;
14759            result.bitField0_ = to_bitField0_;
14760            onBuilt();
14761            return result;
14762          }
14763    
14764          public Builder mergeFrom(com.google.protobuf.Message other) {
14765            if (other instanceof org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection) {
14766              return mergeFrom((org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection)other);
14767            } else {
14768              super.mergeFrom(other);
14769              return this;
14770            }
14771          }
14772    
14773          public Builder mergeFrom(org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection other) {
14774            if (other == org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.getDefaultInstance()) return this;
14775            if (other.hasSnapshotCounter()) {
14776              setSnapshotCounter(other.getSnapshotCounter());
14777            }
14778            if (!other.snapshottableDir_.isEmpty()) {
14779              if (snapshottableDir_.isEmpty()) {
14780                snapshottableDir_ = other.snapshottableDir_;
14781                bitField0_ = (bitField0_ & ~0x00000002);
14782              } else {
14783                ensureSnapshottableDirIsMutable();
14784                snapshottableDir_.addAll(other.snapshottableDir_);
14785              }
14786              onChanged();
14787            }
14788            if (other.hasNumSnapshots()) {
14789              setNumSnapshots(other.getNumSnapshots());
14790            }
14791            this.mergeUnknownFields(other.getUnknownFields());
14792            return this;
14793          }
14794    
14795          public final boolean isInitialized() {
14796            return true;
14797          }
14798    
14799          public Builder mergeFrom(
14800              com.google.protobuf.CodedInputStream input,
14801              com.google.protobuf.ExtensionRegistryLite extensionRegistry)
14802              throws java.io.IOException {
14803            org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection parsedMessage = null;
14804            try {
14805              parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
14806            } catch (com.google.protobuf.InvalidProtocolBufferException e) {
14807              parsedMessage = (org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection) e.getUnfinishedMessage();
14808              throw e;
14809            } finally {
14810              if (parsedMessage != null) {
14811                mergeFrom(parsedMessage);
14812              }
14813            }
14814            return this;
14815          }
14816          private int bitField0_;
14817    
14818          // optional uint32 snapshotCounter = 1;
14819          private int snapshotCounter_ ;
14820          /**
14821           * <code>optional uint32 snapshotCounter = 1;</code>
14822           */
14823          public boolean hasSnapshotCounter() {
14824            return ((bitField0_ & 0x00000001) == 0x00000001);
14825          }
14826          /**
14827           * <code>optional uint32 snapshotCounter = 1;</code>
14828           */
14829          public int getSnapshotCounter() {
14830            return snapshotCounter_;
14831          }
14832          /**
14833           * <code>optional uint32 snapshotCounter = 1;</code>
14834           */
14835          public Builder setSnapshotCounter(int value) {
14836            bitField0_ |= 0x00000001;
14837            snapshotCounter_ = value;
14838            onChanged();
14839            return this;
14840          }
14841          /**
14842           * <code>optional uint32 snapshotCounter = 1;</code>
14843           */
14844          public Builder clearSnapshotCounter() {
14845            bitField0_ = (bitField0_ & ~0x00000001);
14846            snapshotCounter_ = 0;
14847            onChanged();
14848            return this;
14849          }
14850    
14851          // repeated uint64 snapshottableDir = 2 [packed = true];
14852          private java.util.List<java.lang.Long> snapshottableDir_ = java.util.Collections.emptyList();
14853          private void ensureSnapshottableDirIsMutable() {
14854            if (!((bitField0_ & 0x00000002) == 0x00000002)) {
14855              snapshottableDir_ = new java.util.ArrayList<java.lang.Long>(snapshottableDir_);
14856              bitField0_ |= 0x00000002;
14857             }
14858          }
14859          /**
14860           * <code>repeated uint64 snapshottableDir = 2 [packed = true];</code>
14861           */
14862          public java.util.List<java.lang.Long>
14863              getSnapshottableDirList() {
14864            return java.util.Collections.unmodifiableList(snapshottableDir_);
14865          }
14866          /**
14867           * <code>repeated uint64 snapshottableDir = 2 [packed = true];</code>
14868           */
14869          public int getSnapshottableDirCount() {
14870            return snapshottableDir_.size();
14871          }
14872          /**
14873           * <code>repeated uint64 snapshottableDir = 2 [packed = true];</code>
14874           */
14875          public long getSnapshottableDir(int index) {
14876            return snapshottableDir_.get(index);
14877          }
14878          /**
14879           * <code>repeated uint64 snapshottableDir = 2 [packed = true];</code>
14880           */
14881          public Builder setSnapshottableDir(
14882              int index, long value) {
14883            ensureSnapshottableDirIsMutable();
14884            snapshottableDir_.set(index, value);
14885            onChanged();
14886            return this;
14887          }
14888          /**
14889           * <code>repeated uint64 snapshottableDir = 2 [packed = true];</code>
14890           */
14891          public Builder addSnapshottableDir(long value) {
14892            ensureSnapshottableDirIsMutable();
14893            snapshottableDir_.add(value);
14894            onChanged();
14895            return this;
14896          }
14897          /**
14898           * <code>repeated uint64 snapshottableDir = 2 [packed = true];</code>
14899           */
14900          public Builder addAllSnapshottableDir(
14901              java.lang.Iterable<? extends java.lang.Long> values) {
14902            ensureSnapshottableDirIsMutable();
14903            super.addAll(values, snapshottableDir_);
14904            onChanged();
14905            return this;
14906          }
14907          /**
14908           * <code>repeated uint64 snapshottableDir = 2 [packed = true];</code>
14909           */
14910          public Builder clearSnapshottableDir() {
14911            snapshottableDir_ = java.util.Collections.emptyList();
14912            bitField0_ = (bitField0_ & ~0x00000002);
14913            onChanged();
14914            return this;
14915          }
14916    
14917          // optional uint32 numSnapshots = 3;
14918          private int numSnapshots_ ;
14919          /**
14920           * <code>optional uint32 numSnapshots = 3;</code>
14921           *
14922           * <pre>
14923           * total number of snapshots
14924           * </pre>
14925           */
14926          public boolean hasNumSnapshots() {
14927            return ((bitField0_ & 0x00000004) == 0x00000004);
14928          }
14929          /**
14930           * <code>optional uint32 numSnapshots = 3;</code>
14931           *
14932           * <pre>
14933           * total number of snapshots
14934           * </pre>
14935           */
14936          public int getNumSnapshots() {
14937            return numSnapshots_;
14938          }
14939          /**
14940           * <code>optional uint32 numSnapshots = 3;</code>
14941           *
14942           * <pre>
14943           * total number of snapshots
14944           * </pre>
14945           */
14946          public Builder setNumSnapshots(int value) {
14947            bitField0_ |= 0x00000004;
14948            numSnapshots_ = value;
14949            onChanged();
14950            return this;
14951          }
14952          /**
14953           * <code>optional uint32 numSnapshots = 3;</code>
14954           *
14955           * <pre>
14956           * total number of snapshots
14957           * </pre>
14958           */
14959          public Builder clearNumSnapshots() {
14960            bitField0_ = (bitField0_ & ~0x00000004);
14961            numSnapshots_ = 0;
14962            onChanged();
14963            return this;
14964          }
14965    
14966          // @@protoc_insertion_point(builder_scope:hadoop.hdfs.fsimage.SnapshotSection)
14967        }
14968    
    // Eagerly construct the shared default instance for SnapshotSection.
    static {
      defaultInstance = new SnapshotSection(true);
      defaultInstance.initFields();
    }
14973    
14974        // @@protoc_insertion_point(class_scope:hadoop.hdfs.fsimage.SnapshotSection)
14975      }
14976    
  // SnapshotDiffSection declares no fields of its own, so its generated
  // OrBuilder interface adds nothing beyond MessageOrBuilder.
  public interface SnapshotDiffSectionOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
  }
14980      /**
14981       * Protobuf type {@code hadoop.hdfs.fsimage.SnapshotDiffSection}
14982       *
14983       * <pre>
14984       **
14985       * This section records information about snapshot diffs
14986       * NAME: SNAPSHOT_DIFF
14987       * </pre>
14988       */
14989      public static final class SnapshotDiffSection extends
14990          com.google.protobuf.GeneratedMessage
14991          implements SnapshotDiffSectionOrBuilder {
    // Use SnapshotDiffSection.newBuilder() to construct.
    private SnapshotDiffSection(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // noInit path used only for the static defaultInstance singleton below.
    private SnapshotDiffSection(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final SnapshotDiffSection defaultInstance;
    public static SnapshotDiffSection getDefaultInstance() {
      return defaultInstance;
    }

    public SnapshotDiffSection getDefaultInstanceForType() {
      return defaultInstance;
    }
15007    
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor. This message declares no fields, so
    // every tag is either end-of-message (0) or preserved as an unknown field.
    private SnapshotDiffSection(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Always attach whatever unknown fields were read, even on failure.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    /** Returns this message type's descriptor from the file-level descriptor table. */
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.Builder.class);
    }
15059    
15060        public static com.google.protobuf.Parser<SnapshotDiffSection> PARSER =
15061            new com.google.protobuf.AbstractParser<SnapshotDiffSection>() {
15062          public SnapshotDiffSection parsePartialFrom(
15063              com.google.protobuf.CodedInputStream input,
15064              com.google.protobuf.ExtensionRegistryLite extensionRegistry)
15065              throws com.google.protobuf.InvalidProtocolBufferException {
15066            return new SnapshotDiffSection(input, extensionRegistry);
15067          }
15068        };
15069    
15070        @java.lang.Override
15071        public com.google.protobuf.Parser<SnapshotDiffSection> getParserForType() {
15072          return PARSER;
15073        }
15074    
    /**
     * Accessor contract shared by {@code CreatedListEntry} and its Builder.
     */
    public interface CreatedListEntryOrBuilder
        extends com.google.protobuf.MessageOrBuilder {

      // optional bytes name = 1;
      /**
       * <code>optional bytes name = 1;</code>
       */
      boolean hasName();
      /**
       * <code>optional bytes name = 1;</code>
       */
      com.google.protobuf.ByteString getName();
    }
15088        /**
15089         * Protobuf type {@code hadoop.hdfs.fsimage.SnapshotDiffSection.CreatedListEntry}
15090         */
15091        public static final class CreatedListEntry extends
15092            com.google.protobuf.GeneratedMessage
15093            implements CreatedListEntryOrBuilder {
15094          // Use CreatedListEntry.newBuilder() to construct.
15095          private CreatedListEntry(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
15096            super(builder);
15097            this.unknownFields = builder.getUnknownFields();
15098          }
15099          private CreatedListEntry(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
15100    
15101          private static final CreatedListEntry defaultInstance;
15102          public static CreatedListEntry getDefaultInstance() {
15103            return defaultInstance;
15104          }
15105    
15106          public CreatedListEntry getDefaultInstanceForType() {
15107            return defaultInstance;
15108          }
15109    
15110          private final com.google.protobuf.UnknownFieldSet unknownFields;
15111          @java.lang.Override
15112          public final com.google.protobuf.UnknownFieldSet
15113              getUnknownFields() {
15114            return this.unknownFields;
15115          }
15116          private CreatedListEntry(
15117              com.google.protobuf.CodedInputStream input,
15118              com.google.protobuf.ExtensionRegistryLite extensionRegistry)
15119              throws com.google.protobuf.InvalidProtocolBufferException {
15120            initFields();
15121            int mutable_bitField0_ = 0;
15122            com.google.protobuf.UnknownFieldSet.Builder unknownFields =
15123                com.google.protobuf.UnknownFieldSet.newBuilder();
15124            try {
15125              boolean done = false;
15126              while (!done) {
15127                int tag = input.readTag();
15128                switch (tag) {
15129                  case 0:
15130                    done = true;
15131                    break;
15132                  default: {
15133                    if (!parseUnknownField(input, unknownFields,
15134                                           extensionRegistry, tag)) {
15135                      done = true;
15136                    }
15137                    break;
15138                  }
15139                  case 10: {
15140                    bitField0_ |= 0x00000001;
15141                    name_ = input.readBytes();
15142                    break;
15143                  }
15144                }
15145              }
15146            } catch (com.google.protobuf.InvalidProtocolBufferException e) {
15147              throw e.setUnfinishedMessage(this);
15148            } catch (java.io.IOException e) {
15149              throw new com.google.protobuf.InvalidProtocolBufferException(
15150                  e.getMessage()).setUnfinishedMessage(this);
15151            } finally {
15152              this.unknownFields = unknownFields.build();
15153              makeExtensionsImmutable();
15154            }
15155          }
15156          public static final com.google.protobuf.Descriptors.Descriptor
15157              getDescriptor() {
15158            return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_CreatedListEntry_descriptor;
15159          }
15160    
15161          protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
15162              internalGetFieldAccessorTable() {
15163            return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_CreatedListEntry_fieldAccessorTable
15164                .ensureFieldAccessorsInitialized(
15165                    org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.CreatedListEntry.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.CreatedListEntry.Builder.class);
15166          }
15167    
15168          public static com.google.protobuf.Parser<CreatedListEntry> PARSER =
15169              new com.google.protobuf.AbstractParser<CreatedListEntry>() {
15170            public CreatedListEntry parsePartialFrom(
15171                com.google.protobuf.CodedInputStream input,
15172                com.google.protobuf.ExtensionRegistryLite extensionRegistry)
15173                throws com.google.protobuf.InvalidProtocolBufferException {
15174              return new CreatedListEntry(input, extensionRegistry);
15175            }
15176          };
15177    
15178          @java.lang.Override
15179          public com.google.protobuf.Parser<CreatedListEntry> getParserForType() {
15180            return PARSER;
15181          }
15182    
15183          private int bitField0_;
15184          // optional bytes name = 1;
15185          public static final int NAME_FIELD_NUMBER = 1;
15186          private com.google.protobuf.ByteString name_;
15187          /**
15188           * <code>optional bytes name = 1;</code>
15189           */
15190          public boolean hasName() {
15191            return ((bitField0_ & 0x00000001) == 0x00000001);
15192          }
15193          /**
15194           * <code>optional bytes name = 1;</code>
15195           */
15196          public com.google.protobuf.ByteString getName() {
15197            return name_;
15198          }
15199    
15200          private void initFields() {
15201            name_ = com.google.protobuf.ByteString.EMPTY;
15202          }
15203          private byte memoizedIsInitialized = -1;
15204          public final boolean isInitialized() {
15205            byte isInitialized = memoizedIsInitialized;
15206            if (isInitialized != -1) return isInitialized == 1;
15207    
15208            memoizedIsInitialized = 1;
15209            return true;
15210          }
15211    
15212          public void writeTo(com.google.protobuf.CodedOutputStream output)
15213                              throws java.io.IOException {
15214            getSerializedSize();
15215            if (((bitField0_ & 0x00000001) == 0x00000001)) {
15216              output.writeBytes(1, name_);
15217            }
15218            getUnknownFields().writeTo(output);
15219          }
15220    
15221          private int memoizedSerializedSize = -1;
15222          public int getSerializedSize() {
15223            int size = memoizedSerializedSize;
15224            if (size != -1) return size;
15225    
15226            size = 0;
15227            if (((bitField0_ & 0x00000001) == 0x00000001)) {
15228              size += com.google.protobuf.CodedOutputStream
15229                .computeBytesSize(1, name_);
15230            }
15231            size += getUnknownFields().getSerializedSize();
15232            memoizedSerializedSize = size;
15233            return size;
15234          }
15235    
15236          private static final long serialVersionUID = 0L;
15237          @java.lang.Override
15238          protected java.lang.Object writeReplace()
15239              throws java.io.ObjectStreamException {
15240            return super.writeReplace();
15241          }
15242    
15243          public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.CreatedListEntry parseFrom(
15244              com.google.protobuf.ByteString data)
15245              throws com.google.protobuf.InvalidProtocolBufferException {
15246            return PARSER.parseFrom(data);
15247          }
15248          public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.CreatedListEntry parseFrom(
15249              com.google.protobuf.ByteString data,
15250              com.google.protobuf.ExtensionRegistryLite extensionRegistry)
15251              throws com.google.protobuf.InvalidProtocolBufferException {
15252            return PARSER.parseFrom(data, extensionRegistry);
15253          }
15254          public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.CreatedListEntry parseFrom(byte[] data)
15255              throws com.google.protobuf.InvalidProtocolBufferException {
15256            return PARSER.parseFrom(data);
15257          }
15258          public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.CreatedListEntry parseFrom(
15259              byte[] data,
15260              com.google.protobuf.ExtensionRegistryLite extensionRegistry)
15261              throws com.google.protobuf.InvalidProtocolBufferException {
15262            return PARSER.parseFrom(data, extensionRegistry);
15263          }
15264          public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.CreatedListEntry parseFrom(java.io.InputStream input)
15265              throws java.io.IOException {
15266            return PARSER.parseFrom(input);
15267          }
15268          public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.CreatedListEntry parseFrom(
15269              java.io.InputStream input,
15270              com.google.protobuf.ExtensionRegistryLite extensionRegistry)
15271              throws java.io.IOException {
15272            return PARSER.parseFrom(input, extensionRegistry);
15273          }
15274          public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.CreatedListEntry parseDelimitedFrom(java.io.InputStream input)
15275              throws java.io.IOException {
15276            return PARSER.parseDelimitedFrom(input);
15277          }
15278          public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.CreatedListEntry parseDelimitedFrom(
15279              java.io.InputStream input,
15280              com.google.protobuf.ExtensionRegistryLite extensionRegistry)
15281              throws java.io.IOException {
15282            return PARSER.parseDelimitedFrom(input, extensionRegistry);
15283          }
15284          public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.CreatedListEntry parseFrom(
15285              com.google.protobuf.CodedInputStream input)
15286              throws java.io.IOException {
15287            return PARSER.parseFrom(input);
15288          }
15289          public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.CreatedListEntry parseFrom(
15290              com.google.protobuf.CodedInputStream input,
15291              com.google.protobuf.ExtensionRegistryLite extensionRegistry)
15292              throws java.io.IOException {
15293            return PARSER.parseFrom(input, extensionRegistry);
15294          }
15295    
15296          public static Builder newBuilder() { return Builder.create(); }
15297          public Builder newBuilderForType() { return newBuilder(); }
15298          public static Builder newBuilder(org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.CreatedListEntry prototype) {
15299            return newBuilder().mergeFrom(prototype);
15300          }
15301          public Builder toBuilder() { return newBuilder(this); }
15302    
15303          @java.lang.Override
15304          protected Builder newBuilderForType(
15305              com.google.protobuf.GeneratedMessage.BuilderParent parent) {
15306            Builder builder = new Builder(parent);
15307            return builder;
15308          }
15309          /**
15310           * Protobuf type {@code hadoop.hdfs.fsimage.SnapshotDiffSection.CreatedListEntry}
15311           */
15312          public static final class Builder extends
15313              com.google.protobuf.GeneratedMessage.Builder<Builder>
15314             implements org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.CreatedListEntryOrBuilder {
15315            public static final com.google.protobuf.Descriptors.Descriptor
15316                getDescriptor() {
15317              return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_CreatedListEntry_descriptor;
15318            }
15319    
15320            protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
15321                internalGetFieldAccessorTable() {
15322              return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_CreatedListEntry_fieldAccessorTable
15323                  .ensureFieldAccessorsInitialized(
15324                      org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.CreatedListEntry.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.CreatedListEntry.Builder.class);
15325            }
15326    
15327            // Construct using org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.CreatedListEntry.newBuilder()
15328            private Builder() {
15329              maybeForceBuilderInitialization();
15330            }
15331    
15332            private Builder(
15333                com.google.protobuf.GeneratedMessage.BuilderParent parent) {
15334              super(parent);
15335              maybeForceBuilderInitialization();
15336            }
15337            private void maybeForceBuilderInitialization() {
15338              if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
15339              }
15340            }
15341            private static Builder create() {
15342              return new Builder();
15343            }
15344    
15345            public Builder clear() {
15346              super.clear();
15347              name_ = com.google.protobuf.ByteString.EMPTY;
15348              bitField0_ = (bitField0_ & ~0x00000001);
15349              return this;
15350            }
15351    
15352            public Builder clone() {
15353              return create().mergeFrom(buildPartial());
15354            }
15355    
15356            public com.google.protobuf.Descriptors.Descriptor
15357                getDescriptorForType() {
15358              return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_CreatedListEntry_descriptor;
15359            }
15360    
15361            public org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.CreatedListEntry getDefaultInstanceForType() {
15362              return org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.CreatedListEntry.getDefaultInstance();
15363            }
15364    
15365            public org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.CreatedListEntry build() {
15366              org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.CreatedListEntry result = buildPartial();
15367              if (!result.isInitialized()) {
15368                throw newUninitializedMessageException(result);
15369              }
15370              return result;
15371            }
15372    
15373            public org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.CreatedListEntry buildPartial() {
15374              org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.CreatedListEntry result = new org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.CreatedListEntry(this);
15375              int from_bitField0_ = bitField0_;
15376              int to_bitField0_ = 0;
15377              if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
15378                to_bitField0_ |= 0x00000001;
15379              }
15380              result.name_ = name_;
15381              result.bitField0_ = to_bitField0_;
15382              onBuilt();
15383              return result;
15384            }
15385    
15386            public Builder mergeFrom(com.google.protobuf.Message other) {
15387              if (other instanceof org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.CreatedListEntry) {
15388                return mergeFrom((org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.CreatedListEntry)other);
15389              } else {
15390                super.mergeFrom(other);
15391                return this;
15392              }
15393            }
15394    
15395            public Builder mergeFrom(org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.CreatedListEntry other) {
15396              if (other == org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.CreatedListEntry.getDefaultInstance()) return this;
15397              if (other.hasName()) {
15398                setName(other.getName());
15399              }
15400              this.mergeUnknownFields(other.getUnknownFields());
15401              return this;
15402            }
15403    
15404            public final boolean isInitialized() {
15405              return true;
15406            }
15407    
15408            public Builder mergeFrom(
15409                com.google.protobuf.CodedInputStream input,
15410                com.google.protobuf.ExtensionRegistryLite extensionRegistry)
15411                throws java.io.IOException {
15412              org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.CreatedListEntry parsedMessage = null;
15413              try {
15414                parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
15415              } catch (com.google.protobuf.InvalidProtocolBufferException e) {
15416                parsedMessage = (org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.CreatedListEntry) e.getUnfinishedMessage();
15417                throw e;
15418              } finally {
15419                if (parsedMessage != null) {
15420                  mergeFrom(parsedMessage);
15421                }
15422              }
15423              return this;
15424            }
15425            private int bitField0_;
15426    
15427            // optional bytes name = 1;
15428            private com.google.protobuf.ByteString name_ = com.google.protobuf.ByteString.EMPTY;
15429            /**
15430             * <code>optional bytes name = 1;</code>
15431             */
15432            public boolean hasName() {
15433              return ((bitField0_ & 0x00000001) == 0x00000001);
15434            }
15435            /**
15436             * <code>optional bytes name = 1;</code>
15437             */
15438            public com.google.protobuf.ByteString getName() {
15439              return name_;
15440            }
15441            /**
15442             * <code>optional bytes name = 1;</code>
15443             */
15444            public Builder setName(com.google.protobuf.ByteString value) {
15445              if (value == null) {
15446        throw new NullPointerException();
15447      }
15448      bitField0_ |= 0x00000001;
15449              name_ = value;
15450              onChanged();
15451              return this;
15452            }
15453            /**
15454             * <code>optional bytes name = 1;</code>
15455             */
15456            public Builder clearName() {
15457              bitField0_ = (bitField0_ & ~0x00000001);
15458              name_ = getDefaultInstance().getName();
15459              onChanged();
15460              return this;
15461            }
15462    
15463            // @@protoc_insertion_point(builder_scope:hadoop.hdfs.fsimage.SnapshotDiffSection.CreatedListEntry)
15464          }
15465    
15466          static {
15467            defaultInstance = new CreatedListEntry(true);
15468            defaultInstance.initFields();
15469          }
15470    
15471          // @@protoc_insertion_point(class_scope:hadoop.hdfs.fsimage.SnapshotDiffSection.CreatedListEntry)
15472        }
15473    
    /**
     * Accessor contract shared by {@code DirectoryDiff} and its Builder.
     */
    public interface DirectoryDiffOrBuilder
        extends com.google.protobuf.MessageOrBuilder {

      // optional uint32 snapshotId = 1;
      /**
       * <code>optional uint32 snapshotId = 1;</code>
       */
      boolean hasSnapshotId();
      /**
       * <code>optional uint32 snapshotId = 1;</code>
       */
      int getSnapshotId();

      // optional uint32 childrenSize = 2;
      /**
       * <code>optional uint32 childrenSize = 2;</code>
       */
      boolean hasChildrenSize();
      /**
       * <code>optional uint32 childrenSize = 2;</code>
       */
      int getChildrenSize();

      // optional bool isSnapshotRoot = 3;
      /**
       * <code>optional bool isSnapshotRoot = 3;</code>
       */
      boolean hasIsSnapshotRoot();
      /**
       * <code>optional bool isSnapshotRoot = 3;</code>
       */
      boolean getIsSnapshotRoot();

      // optional bytes name = 4;
      /**
       * <code>optional bytes name = 4;</code>
       */
      boolean hasName();
      /**
       * <code>optional bytes name = 4;</code>
       */
      com.google.protobuf.ByteString getName();

      // optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory snapshotCopy = 5;
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory snapshotCopy = 5;</code>
       */
      boolean hasSnapshotCopy();
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory snapshotCopy = 5;</code>
       */
      org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory getSnapshotCopy();
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory snapshotCopy = 5;</code>
       */
      org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectoryOrBuilder getSnapshotCopyOrBuilder();

      // optional uint32 createdListSize = 6;
      /**
       * <code>optional uint32 createdListSize = 6;</code>
       */
      boolean hasCreatedListSize();
      /**
       * <code>optional uint32 createdListSize = 6;</code>
       */
      int getCreatedListSize();

      // repeated uint64 deletedINode = 7 [packed = true];
      /**
       * <code>repeated uint64 deletedINode = 7 [packed = true];</code>
       *
       * <pre>
       * id of deleted inodes
       * </pre>
       */
      java.util.List<java.lang.Long> getDeletedINodeList();
      /**
       * <code>repeated uint64 deletedINode = 7 [packed = true];</code>
       *
       * <pre>
       * id of deleted inodes
       * </pre>
       */
      int getDeletedINodeCount();
      /**
       * <code>repeated uint64 deletedINode = 7 [packed = true];</code>
       *
       * <pre>
       * id of deleted inodes
       * </pre>
       */
      long getDeletedINode(int index);

      // repeated uint32 deletedINodeRef = 8 [packed = true];
      /**
       * <code>repeated uint32 deletedINodeRef = 8 [packed = true];</code>
       *
       * <pre>
       * id of reference nodes in the deleted list
       * </pre>
       */
      java.util.List<java.lang.Integer> getDeletedINodeRefList();
      /**
       * <code>repeated uint32 deletedINodeRef = 8 [packed = true];</code>
       *
       * <pre>
       * id of reference nodes in the deleted list
       * </pre>
       */
      int getDeletedINodeRefCount();
      /**
       * <code>repeated uint32 deletedINodeRef = 8 [packed = true];</code>
       *
       * <pre>
       * id of reference nodes in the deleted list
       * </pre>
       */
      int getDeletedINodeRef(int index);
    }
15593        /**
15594         * Protobuf type {@code hadoop.hdfs.fsimage.SnapshotDiffSection.DirectoryDiff}
15595         */
15596        public static final class DirectoryDiff extends
15597            com.google.protobuf.GeneratedMessage
15598            implements DirectoryDiffOrBuilder {
      // Use DirectoryDiff.newBuilder() to construct.
      /** Builder-based constructor; adopts the builder's unknown-field set. */
      private DirectoryDiff(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
        super(builder);
        this.unknownFields = builder.getUnknownFields();
      }
      /** Constructor for the default-instance singleton; installs an empty unknown-field set. */
      private DirectoryDiff(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

      // Blank final: assigned exactly once in the class's static initializer.
      private static final DirectoryDiff defaultInstance;
      public static DirectoryDiff getDefaultInstance() {
        return defaultInstance;
      }

      public DirectoryDiff getDefaultInstanceForType() {
        return defaultInstance;
      }

      // Wire fields not recognized at parse time; preserved for reserialization.
      private final com.google.protobuf.UnknownFieldSet unknownFields;
      @java.lang.Override
      public final com.google.protobuf.UnknownFieldSet
          getUnknownFields() {
        return this.unknownFields;
      }
15621          private DirectoryDiff(
15622              com.google.protobuf.CodedInputStream input,
15623              com.google.protobuf.ExtensionRegistryLite extensionRegistry)
15624              throws com.google.protobuf.InvalidProtocolBufferException {
15625            initFields();
15626            int mutable_bitField0_ = 0;
15627            com.google.protobuf.UnknownFieldSet.Builder unknownFields =
15628                com.google.protobuf.UnknownFieldSet.newBuilder();
15629            try {
15630              boolean done = false;
15631              while (!done) {
15632                int tag = input.readTag();
15633                switch (tag) {
15634                  case 0:
15635                    done = true;
15636                    break;
15637                  default: {
15638                    if (!parseUnknownField(input, unknownFields,
15639                                           extensionRegistry, tag)) {
15640                      done = true;
15641                    }
15642                    break;
15643                  }
15644                  case 8: {
15645                    bitField0_ |= 0x00000001;
15646                    snapshotId_ = input.readUInt32();
15647                    break;
15648                  }
15649                  case 16: {
15650                    bitField0_ |= 0x00000002;
15651                    childrenSize_ = input.readUInt32();
15652                    break;
15653                  }
15654                  case 24: {
15655                    bitField0_ |= 0x00000004;
15656                    isSnapshotRoot_ = input.readBool();
15657                    break;
15658                  }
15659                  case 34: {
15660                    bitField0_ |= 0x00000008;
15661                    name_ = input.readBytes();
15662                    break;
15663                  }
15664                  case 42: {
15665                    org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory.Builder subBuilder = null;
15666                    if (((bitField0_ & 0x00000010) == 0x00000010)) {
15667                      subBuilder = snapshotCopy_.toBuilder();
15668                    }
15669                    snapshotCopy_ = input.readMessage(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory.PARSER, extensionRegistry);
15670                    if (subBuilder != null) {
15671                      subBuilder.mergeFrom(snapshotCopy_);
15672                      snapshotCopy_ = subBuilder.buildPartial();
15673                    }
15674                    bitField0_ |= 0x00000010;
15675                    break;
15676                  }
15677                  case 48: {
15678                    bitField0_ |= 0x00000020;
15679                    createdListSize_ = input.readUInt32();
15680                    break;
15681                  }
15682                  case 56: {
15683                    if (!((mutable_bitField0_ & 0x00000040) == 0x00000040)) {
15684                      deletedINode_ = new java.util.ArrayList<java.lang.Long>();
15685                      mutable_bitField0_ |= 0x00000040;
15686                    }
15687                    deletedINode_.add(input.readUInt64());
15688                    break;
15689                  }
15690                  case 58: {
15691                    int length = input.readRawVarint32();
15692                    int limit = input.pushLimit(length);
15693                    if (!((mutable_bitField0_ & 0x00000040) == 0x00000040) && input.getBytesUntilLimit() > 0) {
15694                      deletedINode_ = new java.util.ArrayList<java.lang.Long>();
15695                      mutable_bitField0_ |= 0x00000040;
15696                    }
15697                    while (input.getBytesUntilLimit() > 0) {
15698                      deletedINode_.add(input.readUInt64());
15699                    }
15700                    input.popLimit(limit);
15701                    break;
15702                  }
15703                  case 64: {
15704                    if (!((mutable_bitField0_ & 0x00000080) == 0x00000080)) {
15705                      deletedINodeRef_ = new java.util.ArrayList<java.lang.Integer>();
15706                      mutable_bitField0_ |= 0x00000080;
15707                    }
15708                    deletedINodeRef_.add(input.readUInt32());
15709                    break;
15710                  }
15711                  case 66: {
15712                    int length = input.readRawVarint32();
15713                    int limit = input.pushLimit(length);
15714                    if (!((mutable_bitField0_ & 0x00000080) == 0x00000080) && input.getBytesUntilLimit() > 0) {
15715                      deletedINodeRef_ = new java.util.ArrayList<java.lang.Integer>();
15716                      mutable_bitField0_ |= 0x00000080;
15717                    }
15718                    while (input.getBytesUntilLimit() > 0) {
15719                      deletedINodeRef_.add(input.readUInt32());
15720                    }
15721                    input.popLimit(limit);
15722                    break;
15723                  }
15724                }
15725              }
15726            } catch (com.google.protobuf.InvalidProtocolBufferException e) {
15727              throw e.setUnfinishedMessage(this);
15728            } catch (java.io.IOException e) {
15729              throw new com.google.protobuf.InvalidProtocolBufferException(
15730                  e.getMessage()).setUnfinishedMessage(this);
15731            } finally {
15732              if (((mutable_bitField0_ & 0x00000040) == 0x00000040)) {
15733                deletedINode_ = java.util.Collections.unmodifiableList(deletedINode_);
15734              }
15735              if (((mutable_bitField0_ & 0x00000080) == 0x00000080)) {
15736                deletedINodeRef_ = java.util.Collections.unmodifiableList(deletedINodeRef_);
15737              }
15738              this.unknownFields = unknownFields.build();
15739              makeExtensionsImmutable();
15740            }
15741          }
15742          public static final com.google.protobuf.Descriptors.Descriptor
15743              getDescriptor() {
15744            return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_DirectoryDiff_descriptor;
15745          }
15746    
15747          protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
15748              internalGetFieldAccessorTable() {
15749            return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_DirectoryDiff_fieldAccessorTable
15750                .ensureFieldAccessorsInitialized(
15751                    org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DirectoryDiff.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DirectoryDiff.Builder.class);
15752          }
15753    
      /**
       * Shared parser used by all of the static {@code parseFrom} helpers below.
       * NOTE(review): protobuf 2.5 codegen emits this as a mutable public static
       * field (not final); never reassign it.
       */
      public static com.google.protobuf.Parser<DirectoryDiff> PARSER =
          new com.google.protobuf.AbstractParser<DirectoryDiff>() {
        public DirectoryDiff parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          // The parsing constructor consumes the wire format directly.
          return new DirectoryDiff(input, extensionRegistry);
        }
      };
15763    
15764          @java.lang.Override
15765          public com.google.protobuf.Parser<DirectoryDiff> getParserForType() {
15766            return PARSER;
15767          }
15768    
15769          private int bitField0_;
15770          // optional uint32 snapshotId = 1;
15771          public static final int SNAPSHOTID_FIELD_NUMBER = 1;
15772          private int snapshotId_;
15773          /**
15774           * <code>optional uint32 snapshotId = 1;</code>
15775           */
15776          public boolean hasSnapshotId() {
15777            return ((bitField0_ & 0x00000001) == 0x00000001);
15778          }
15779          /**
15780           * <code>optional uint32 snapshotId = 1;</code>
15781           */
15782          public int getSnapshotId() {
15783            return snapshotId_;
15784          }
15785    
15786          // optional uint32 childrenSize = 2;
15787          public static final int CHILDRENSIZE_FIELD_NUMBER = 2;
15788          private int childrenSize_;
15789          /**
15790           * <code>optional uint32 childrenSize = 2;</code>
15791           */
15792          public boolean hasChildrenSize() {
15793            return ((bitField0_ & 0x00000002) == 0x00000002);
15794          }
15795          /**
15796           * <code>optional uint32 childrenSize = 2;</code>
15797           */
15798          public int getChildrenSize() {
15799            return childrenSize_;
15800          }
15801    
15802          // optional bool isSnapshotRoot = 3;
15803          public static final int ISSNAPSHOTROOT_FIELD_NUMBER = 3;
15804          private boolean isSnapshotRoot_;
15805          /**
15806           * <code>optional bool isSnapshotRoot = 3;</code>
15807           */
15808          public boolean hasIsSnapshotRoot() {
15809            return ((bitField0_ & 0x00000004) == 0x00000004);
15810          }
15811          /**
15812           * <code>optional bool isSnapshotRoot = 3;</code>
15813           */
15814          public boolean getIsSnapshotRoot() {
15815            return isSnapshotRoot_;
15816          }
15817    
15818          // optional bytes name = 4;
15819          public static final int NAME_FIELD_NUMBER = 4;
15820          private com.google.protobuf.ByteString name_;
15821          /**
15822           * <code>optional bytes name = 4;</code>
15823           */
15824          public boolean hasName() {
15825            return ((bitField0_ & 0x00000008) == 0x00000008);
15826          }
15827          /**
15828           * <code>optional bytes name = 4;</code>
15829           */
15830          public com.google.protobuf.ByteString getName() {
15831            return name_;
15832          }
15833    
15834          // optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory snapshotCopy = 5;
15835          public static final int SNAPSHOTCOPY_FIELD_NUMBER = 5;
15836          private org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory snapshotCopy_;
15837          /**
15838           * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory snapshotCopy = 5;</code>
15839           */
15840          public boolean hasSnapshotCopy() {
15841            return ((bitField0_ & 0x00000010) == 0x00000010);
15842          }
15843          /**
15844           * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory snapshotCopy = 5;</code>
15845           */
15846          public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory getSnapshotCopy() {
15847            return snapshotCopy_;
15848          }
15849          /**
15850           * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory snapshotCopy = 5;</code>
15851           */
15852          public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectoryOrBuilder getSnapshotCopyOrBuilder() {
15853            return snapshotCopy_;
15854          }
15855    
15856          // optional uint32 createdListSize = 6;
15857          public static final int CREATEDLISTSIZE_FIELD_NUMBER = 6;
15858          private int createdListSize_;
15859          /**
15860           * <code>optional uint32 createdListSize = 6;</code>
15861           */
15862          public boolean hasCreatedListSize() {
15863            return ((bitField0_ & 0x00000020) == 0x00000020);
15864          }
15865          /**
15866           * <code>optional uint32 createdListSize = 6;</code>
15867           */
15868          public int getCreatedListSize() {
15869            return createdListSize_;
15870          }
15871    
15872          // repeated uint64 deletedINode = 7 [packed = true];
15873          public static final int DELETEDINODE_FIELD_NUMBER = 7;
15874          private java.util.List<java.lang.Long> deletedINode_;
15875          /**
15876           * <code>repeated uint64 deletedINode = 7 [packed = true];</code>
15877           *
15878           * <pre>
15879           * id of deleted inodes
15880           * </pre>
15881           */
15882          public java.util.List<java.lang.Long>
15883              getDeletedINodeList() {
15884            return deletedINode_;
15885          }
15886          /**
15887           * <code>repeated uint64 deletedINode = 7 [packed = true];</code>
15888           *
15889           * <pre>
15890           * id of deleted inodes
15891           * </pre>
15892           */
15893          public int getDeletedINodeCount() {
15894            return deletedINode_.size();
15895          }
15896          /**
15897           * <code>repeated uint64 deletedINode = 7 [packed = true];</code>
15898           *
15899           * <pre>
15900           * id of deleted inodes
15901           * </pre>
15902           */
15903          public long getDeletedINode(int index) {
15904            return deletedINode_.get(index);
15905          }
15906          private int deletedINodeMemoizedSerializedSize = -1;
15907    
15908          // repeated uint32 deletedINodeRef = 8 [packed = true];
15909          public static final int DELETEDINODEREF_FIELD_NUMBER = 8;
15910          private java.util.List<java.lang.Integer> deletedINodeRef_;
15911          /**
15912           * <code>repeated uint32 deletedINodeRef = 8 [packed = true];</code>
15913           *
15914           * <pre>
15915           * id of reference nodes in the deleted list
15916           * </pre>
15917           */
15918          public java.util.List<java.lang.Integer>
15919              getDeletedINodeRefList() {
15920            return deletedINodeRef_;
15921          }
15922          /**
15923           * <code>repeated uint32 deletedINodeRef = 8 [packed = true];</code>
15924           *
15925           * <pre>
15926           * id of reference nodes in the deleted list
15927           * </pre>
15928           */
15929          public int getDeletedINodeRefCount() {
15930            return deletedINodeRef_.size();
15931          }
15932          /**
15933           * <code>repeated uint32 deletedINodeRef = 8 [packed = true];</code>
15934           *
15935           * <pre>
15936           * id of reference nodes in the deleted list
15937           * </pre>
15938           */
15939          public int getDeletedINodeRef(int index) {
15940            return deletedINodeRef_.get(index);
15941          }
15942          private int deletedINodeRefMemoizedSerializedSize = -1;
15943    
      // Assigns every field its declared default value; invoked before parsing
      // so unset fields read as their defaults.
      private void initFields() {
        snapshotId_ = 0;
        childrenSize_ = 0;
        isSnapshotRoot_ = false;
        name_ = com.google.protobuf.ByteString.EMPTY;
        snapshotCopy_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory.getDefaultInstance();
        createdListSize_ = 0;
        deletedINode_ = java.util.Collections.emptyList();
        deletedINodeRef_ = java.util.Collections.emptyList();
      }
      // Memoized isInitialized() result: -1 = not yet computed, 0 = false, 1 = true.
      private byte memoizedIsInitialized = -1;
      /**
       * A DirectoryDiff is initialized when its optional {@code snapshotCopy}
       * sub-message, if present, is itself initialized; every other field here
       * is an optional scalar or repeated list with no required parts.
       */
      public final boolean isInitialized() {
        byte isInitialized = memoizedIsInitialized;
        if (isInitialized != -1) return isInitialized == 1;

        if (hasSnapshotCopy()) {
          if (!getSnapshotCopy().isInitialized()) {
            memoizedIsInitialized = 0;
            return false;
          }
        }
        memoizedIsInitialized = 1;
        return true;
      }
15968    
      /**
       * Serializes this message to {@code output} in ascending field order.
       * getSerializedSize() is called first because it computes and caches the
       * packed payload lengths (deletedINodeMemoizedSerializedSize and
       * deletedINodeRefMemoizedSerializedSize) written as length prefixes below.
       */
      public void writeTo(com.google.protobuf.CodedOutputStream output)
                          throws java.io.IOException {
        getSerializedSize();
        if (((bitField0_ & 0x00000001) == 0x00000001)) {
          output.writeUInt32(1, snapshotId_);
        }
        if (((bitField0_ & 0x00000002) == 0x00000002)) {
          output.writeUInt32(2, childrenSize_);
        }
        if (((bitField0_ & 0x00000004) == 0x00000004)) {
          output.writeBool(3, isSnapshotRoot_);
        }
        if (((bitField0_ & 0x00000008) == 0x00000008)) {
          output.writeBytes(4, name_);
        }
        if (((bitField0_ & 0x00000010) == 0x00000010)) {
          output.writeMessage(5, snapshotCopy_);
        }
        if (((bitField0_ & 0x00000020) == 0x00000020)) {
          output.writeUInt32(6, createdListSize_);
        }
        if (getDeletedINodeList().size() > 0) {
          // 58 = (field 7 << 3) | wire type 2: a packed field is one
          // length-delimited record.
          output.writeRawVarint32(58);
          output.writeRawVarint32(deletedINodeMemoizedSerializedSize);
        }
        for (int i = 0; i < deletedINode_.size(); i++) {
          output.writeUInt64NoTag(deletedINode_.get(i));
        }
        if (getDeletedINodeRefList().size() > 0) {
          // 66 = (field 8 << 3) | wire type 2.
          output.writeRawVarint32(66);
          output.writeRawVarint32(deletedINodeRefMemoizedSerializedSize);
        }
        for (int i = 0; i < deletedINodeRef_.size(); i++) {
          output.writeUInt32NoTag(deletedINodeRef_.get(i));
        }
        getUnknownFields().writeTo(output);
      }
16006    
      // Memoized total serialized size; -1 means "not yet computed".
      private int memoizedSerializedSize = -1;
      /**
       * Computes (and caches) the serialized size of this message in bytes.
       * As a side effect it also caches the raw payload sizes of the two
       * packed repeated fields, which writeTo() re-uses as length prefixes.
       */
      public int getSerializedSize() {
        int size = memoizedSerializedSize;
        if (size != -1) return size;

        size = 0;
        if (((bitField0_ & 0x00000001) == 0x00000001)) {
          size += com.google.protobuf.CodedOutputStream
            .computeUInt32Size(1, snapshotId_);
        }
        if (((bitField0_ & 0x00000002) == 0x00000002)) {
          size += com.google.protobuf.CodedOutputStream
            .computeUInt32Size(2, childrenSize_);
        }
        if (((bitField0_ & 0x00000004) == 0x00000004)) {
          size += com.google.protobuf.CodedOutputStream
            .computeBoolSize(3, isSnapshotRoot_);
        }
        if (((bitField0_ & 0x00000008) == 0x00000008)) {
          size += com.google.protobuf.CodedOutputStream
            .computeBytesSize(4, name_);
        }
        if (((bitField0_ & 0x00000010) == 0x00000010)) {
          size += com.google.protobuf.CodedOutputStream
            .computeMessageSize(5, snapshotCopy_);
        }
        if (((bitField0_ & 0x00000020) == 0x00000020)) {
          size += com.google.protobuf.CodedOutputStream
            .computeUInt32Size(6, createdListSize_);
        }
        {
          // Packed field 7: payload bytes plus, when non-empty, one tag byte
          // and the varint length prefix.
          int dataSize = 0;
          for (int i = 0; i < deletedINode_.size(); i++) {
            dataSize += com.google.protobuf.CodedOutputStream
              .computeUInt64SizeNoTag(deletedINode_.get(i));
          }
          size += dataSize;
          if (!getDeletedINodeList().isEmpty()) {
            size += 1;
            size += com.google.protobuf.CodedOutputStream
                .computeInt32SizeNoTag(dataSize);
          }
          deletedINodeMemoizedSerializedSize = dataSize;
        }
        {
          // Packed field 8: same accounting as field 7.
          int dataSize = 0;
          for (int i = 0; i < deletedINodeRef_.size(); i++) {
            dataSize += com.google.protobuf.CodedOutputStream
              .computeUInt32SizeNoTag(deletedINodeRef_.get(i));
          }
          size += dataSize;
          if (!getDeletedINodeRefList().isEmpty()) {
            size += 1;
            size += com.google.protobuf.CodedOutputStream
                .computeInt32SizeNoTag(dataSize);
          }
          deletedINodeRefMemoizedSerializedSize = dataSize;
        }
        size += getUnknownFields().getSerializedSize();
        memoizedSerializedSize = size;
        return size;
      }
16069    
      private static final long serialVersionUID = 0L;
      @java.lang.Override
      protected java.lang.Object writeReplace()
          throws java.io.ObjectStreamException {
        // Java serialization is delegated to the GeneratedMessage superclass.
        return super.writeReplace();
      }
16076    
      // Static parsing entry points; each overload simply delegates to the
      // shared PARSER with the corresponding input type.
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DirectoryDiff parseFrom(
          com.google.protobuf.ByteString data)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DirectoryDiff parseFrom(
          com.google.protobuf.ByteString data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DirectoryDiff parseFrom(byte[] data)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DirectoryDiff parseFrom(
          byte[] data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DirectoryDiff parseFrom(java.io.InputStream input)
          throws java.io.IOException {
        return PARSER.parseFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DirectoryDiff parseFrom(
          java.io.InputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseFrom(input, extensionRegistry);
      }
      // Delimited variants read a leading varint length before the message bytes.
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DirectoryDiff parseDelimitedFrom(java.io.InputStream input)
          throws java.io.IOException {
        return PARSER.parseDelimitedFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DirectoryDiff parseDelimitedFrom(
          java.io.InputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseDelimitedFrom(input, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DirectoryDiff parseFrom(
          com.google.protobuf.CodedInputStream input)
          throws java.io.IOException {
        return PARSER.parseFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DirectoryDiff parseFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseFrom(input, extensionRegistry);
      }
16129    
      // Builder factory helpers.
      public static Builder newBuilder() { return Builder.create(); }
      public Builder newBuilderForType() { return newBuilder(); }
      public static Builder newBuilder(org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DirectoryDiff prototype) {
        // Fresh builder pre-populated with the prototype's field values.
        return newBuilder().mergeFrom(prototype);
      }
      public Builder toBuilder() { return newBuilder(this); }
16136    
16137          @java.lang.Override
16138          protected Builder newBuilderForType(
16139              com.google.protobuf.GeneratedMessage.BuilderParent parent) {
16140            Builder builder = new Builder(parent);
16141            return builder;
16142          }
16143          /**
16144           * Protobuf type {@code hadoop.hdfs.fsimage.SnapshotDiffSection.DirectoryDiff}
16145           */
16146          public static final class Builder extends
16147              com.google.protobuf.GeneratedMessage.Builder<Builder>
16148             implements org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DirectoryDiffOrBuilder {
16149            public static final com.google.protobuf.Descriptors.Descriptor
16150                getDescriptor() {
16151              return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_DirectoryDiff_descriptor;
16152            }
16153    
16154            protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
16155                internalGetFieldAccessorTable() {
16156              return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_DirectoryDiff_fieldAccessorTable
16157                  .ensureFieldAccessorsInitialized(
16158                      org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DirectoryDiff.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DirectoryDiff.Builder.class);
16159            }
16160    
        // Construct using org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DirectoryDiff.newBuilder()
        private Builder() {
          maybeForceBuilderInitialization();
        }

        // Parent-aware constructor used for nested builders.
        private Builder(
            com.google.protobuf.GeneratedMessage.BuilderParent parent) {
          super(parent);
          maybeForceBuilderInitialization();
        }
        // Eagerly creates the snapshotCopy field builder when the runtime is
        // configured to always use field builders.
        private void maybeForceBuilderInitialization() {
          if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
            getSnapshotCopyFieldBuilder();
          }
        }
        private static Builder create() {
          return new Builder();
        }
16179    
16180            public Builder clear() {
16181              super.clear();
16182              snapshotId_ = 0;
16183              bitField0_ = (bitField0_ & ~0x00000001);
16184              childrenSize_ = 0;
16185              bitField0_ = (bitField0_ & ~0x00000002);
16186              isSnapshotRoot_ = false;
16187              bitField0_ = (bitField0_ & ~0x00000004);
16188              name_ = com.google.protobuf.ByteString.EMPTY;
16189              bitField0_ = (bitField0_ & ~0x00000008);
16190              if (snapshotCopyBuilder_ == null) {
16191                snapshotCopy_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory.getDefaultInstance();
16192              } else {
16193                snapshotCopyBuilder_.clear();
16194              }
16195              bitField0_ = (bitField0_ & ~0x00000010);
16196              createdListSize_ = 0;
16197              bitField0_ = (bitField0_ & ~0x00000020);
16198              deletedINode_ = java.util.Collections.emptyList();
16199              bitField0_ = (bitField0_ & ~0x00000040);
16200              deletedINodeRef_ = java.util.Collections.emptyList();
16201              bitField0_ = (bitField0_ & ~0x00000080);
16202              return this;
16203            }
16204    
        // Deep-copies the builder by building a partial message and merging it
        // into a fresh builder.
        public Builder clone() {
          return create().mergeFrom(buildPartial());
        }

        public com.google.protobuf.Descriptors.Descriptor
            getDescriptorForType() {
          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_DirectoryDiff_descriptor;
        }

        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DirectoryDiff getDefaultInstanceForType() {
          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DirectoryDiff.getDefaultInstance();
        }
16217    
16218            public org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DirectoryDiff build() {
16219              org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DirectoryDiff result = buildPartial();
16220              if (!result.isInitialized()) {
16221                throw newUninitializedMessageException(result);
16222              }
16223              return result;
16224            }
16225    
        /**
         * Builds the message without checking initialization. Copies each set
         * field into the result, translating the builder's presence bits into
         * the message's bitField0_, and freezes the repeated lists so the
         * built message shares immutable views of them.
         */
        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DirectoryDiff buildPartial() {
          org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DirectoryDiff result = new org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DirectoryDiff(this);
          int from_bitField0_ = bitField0_;
          int to_bitField0_ = 0;
          if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
            to_bitField0_ |= 0x00000001;
          }
          result.snapshotId_ = snapshotId_;
          if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
            to_bitField0_ |= 0x00000002;
          }
          result.childrenSize_ = childrenSize_;
          if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
            to_bitField0_ |= 0x00000004;
          }
          result.isSnapshotRoot_ = isSnapshotRoot_;
          if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
            to_bitField0_ |= 0x00000008;
          }
          result.name_ = name_;
          if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
            to_bitField0_ |= 0x00000010;
          }
          // snapshotCopy comes from either the inline value or the field builder.
          if (snapshotCopyBuilder_ == null) {
            result.snapshotCopy_ = snapshotCopy_;
          } else {
            result.snapshotCopy_ = snapshotCopyBuilder_.build();
          }
          if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
            to_bitField0_ |= 0x00000020;
          }
          result.createdListSize_ = createdListSize_;
          // Freeze the repeated lists: make them unmodifiable and clear the
          // "builder owns a mutable copy" bit so future edits re-copy.
          if (((bitField0_ & 0x00000040) == 0x00000040)) {
            deletedINode_ = java.util.Collections.unmodifiableList(deletedINode_);
            bitField0_ = (bitField0_ & ~0x00000040);
          }
          result.deletedINode_ = deletedINode_;
          if (((bitField0_ & 0x00000080) == 0x00000080)) {
            deletedINodeRef_ = java.util.Collections.unmodifiableList(deletedINodeRef_);
            bitField0_ = (bitField0_ & ~0x00000080);
          }
          result.deletedINodeRef_ = deletedINodeRef_;
          result.bitField0_ = to_bitField0_;
          onBuilt();
          return result;
        }
16272    
16273            public Builder mergeFrom(com.google.protobuf.Message other) {
16274              if (other instanceof org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DirectoryDiff) {
16275                return mergeFrom((org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DirectoryDiff)other);
16276              } else {
16277                super.mergeFrom(other);
16278                return this;
16279              }
16280            }
16281    
        /**
         * Merges {@code other} into this builder: set scalar fields overwrite,
         * the snapshotCopy sub-message is merged field-by-field, and repeated
         * lists are appended. Merging the default instance is a no-op.
         */
        public Builder mergeFrom(org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DirectoryDiff other) {
          if (other == org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DirectoryDiff.getDefaultInstance()) return this;
          if (other.hasSnapshotId()) {
            setSnapshotId(other.getSnapshotId());
          }
          if (other.hasChildrenSize()) {
            setChildrenSize(other.getChildrenSize());
          }
          if (other.hasIsSnapshotRoot()) {
            setIsSnapshotRoot(other.getIsSnapshotRoot());
          }
          if (other.hasName()) {
            setName(other.getName());
          }
          if (other.hasSnapshotCopy()) {
            mergeSnapshotCopy(other.getSnapshotCopy());
          }
          if (other.hasCreatedListSize()) {
            setCreatedListSize(other.getCreatedListSize());
          }
          // Repeated fields: when ours is empty, share other's immutable list
          // (clearing the mutable-ownership bit); otherwise copy-append.
          if (!other.deletedINode_.isEmpty()) {
            if (deletedINode_.isEmpty()) {
              deletedINode_ = other.deletedINode_;
              bitField0_ = (bitField0_ & ~0x00000040);
            } else {
              ensureDeletedINodeIsMutable();
              deletedINode_.addAll(other.deletedINode_);
            }
            onChanged();
          }
          if (!other.deletedINodeRef_.isEmpty()) {
            if (deletedINodeRef_.isEmpty()) {
              deletedINodeRef_ = other.deletedINodeRef_;
              bitField0_ = (bitField0_ & ~0x00000080);
            } else {
              ensureDeletedINodeRefIsMutable();
              deletedINodeRef_.addAll(other.deletedINodeRef_);
            }
            onChanged();
          }
          this.mergeUnknownFields(other.getUnknownFields());
          return this;
        }
16325    
16326            public final boolean isInitialized() {
16327              if (hasSnapshotCopy()) {
16328                if (!getSnapshotCopy().isInitialized()) {
16329                  
16330                  return false;
16331                }
16332              }
16333              return true;
16334            }
16335    
        /**
         * Parses a DirectoryDiff from {@code input} and merges it into this
         * builder. On a parse failure the partially-read message (recovered
         * from the exception in the finally block) is still merged before the
         * exception propagates, preserving whatever fields were read.
         */
        public Builder mergeFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws java.io.IOException {
          org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DirectoryDiff parsedMessage = null;
          try {
            parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            parsedMessage = (org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DirectoryDiff) e.getUnfinishedMessage();
            throw e;
          } finally {
            if (parsedMessage != null) {
              mergeFrom(parsedMessage);
            }
          }
          return this;
        }
        // Bit flags for this builder: presence bits for the optional fields
        // (0x01..0x20) and list-ownership bits for the repeated fields
        // (0x40 deletedINode, 0x80 deletedINodeRef).
        private int bitField0_;

        // optional uint32 snapshotId = 1;
        private int snapshotId_ ;
        /**
         * <code>optional uint32 snapshotId = 1;</code>
         */
        public boolean hasSnapshotId() {
          return ((bitField0_ & 0x00000001) == 0x00000001);
        }
        /**
         * <code>optional uint32 snapshotId = 1;</code>
         */
        public int getSnapshotId() {
          return snapshotId_;
        }
        /**
         * <code>optional uint32 snapshotId = 1;</code>
         */
        public Builder setSnapshotId(int value) {
          bitField0_ |= 0x00000001;
          snapshotId_ = value;
          onChanged();
          return this;
        }
        /**
         * <code>optional uint32 snapshotId = 1;</code>
         */
        public Builder clearSnapshotId() {
          bitField0_ = (bitField0_ & ~0x00000001);
          snapshotId_ = 0;
          onChanged();
          return this;
        }
16387    
        // optional uint32 childrenSize = 2;
        // Presence is tracked by bit 0x00000002 of bitField0_.
        private int childrenSize_ ;
        /**
         * <code>optional uint32 childrenSize = 2;</code>
         */
        public boolean hasChildrenSize() {
          return ((bitField0_ & 0x00000002) == 0x00000002);
        }
        /**
         * <code>optional uint32 childrenSize = 2;</code>
         */
        public int getChildrenSize() {
          return childrenSize_;
        }
        /**
         * <code>optional uint32 childrenSize = 2;</code>
         */
        public Builder setChildrenSize(int value) {
          bitField0_ |= 0x00000002;
          childrenSize_ = value;
          onChanged();
          return this;
        }
        /**
         * <code>optional uint32 childrenSize = 2;</code>
         */
        public Builder clearChildrenSize() {
          bitField0_ = (bitField0_ & ~0x00000002);
          childrenSize_ = 0;
          onChanged();
          return this;
        }
16420    
        // optional bool isSnapshotRoot = 3;
        // Presence is tracked by bit 0x00000004 of bitField0_.
        private boolean isSnapshotRoot_ ;
        /**
         * <code>optional bool isSnapshotRoot = 3;</code>
         */
        public boolean hasIsSnapshotRoot() {
          return ((bitField0_ & 0x00000004) == 0x00000004);
        }
        /**
         * <code>optional bool isSnapshotRoot = 3;</code>
         */
        public boolean getIsSnapshotRoot() {
          return isSnapshotRoot_;
        }
        /**
         * <code>optional bool isSnapshotRoot = 3;</code>
         */
        public Builder setIsSnapshotRoot(boolean value) {
          bitField0_ |= 0x00000004;
          isSnapshotRoot_ = value;
          onChanged();
          return this;
        }
        /**
         * <code>optional bool isSnapshotRoot = 3;</code>
         */
        public Builder clearIsSnapshotRoot() {
          bitField0_ = (bitField0_ & ~0x00000004);
          isSnapshotRoot_ = false;
          onChanged();
          return this;
        }
16453    
        // optional bytes name = 4;
        // Presence is tracked by bit 0x00000008 of bitField0_.
        private com.google.protobuf.ByteString name_ = com.google.protobuf.ByteString.EMPTY;
        /**
         * <code>optional bytes name = 4;</code>
         */
        public boolean hasName() {
          return ((bitField0_ & 0x00000008) == 0x00000008);
        }
        /**
         * <code>optional bytes name = 4;</code>
         */
        public com.google.protobuf.ByteString getName() {
          return name_;
        }
        /**
         * <code>optional bytes name = 4;</code>
         * Rejects null; protobuf fields are never null, use clearName() instead.
         */
        public Builder setName(com.google.protobuf.ByteString value) {
          if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000008;
          name_ = value;
          onChanged();
          return this;
        }
        /**
         * <code>optional bytes name = 4;</code>
         */
        public Builder clearName() {
          bitField0_ = (bitField0_ & ~0x00000008);
          name_ = getDefaultInstance().getName();
          onChanged();
          return this;
        }
16489    
        // optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory snapshotCopy = 5;
        // The sub-message lives either inline in snapshotCopy_ or, once a nested
        // builder has been requested, in snapshotCopyBuilder_; exactly one of the
        // two is the source of truth at any time.
        private org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory snapshotCopy_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory.getDefaultInstance();
        private com.google.protobuf.SingleFieldBuilder<
            org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectoryOrBuilder> snapshotCopyBuilder_;
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory snapshotCopy = 5;</code>
         */
        public boolean hasSnapshotCopy() {
          return ((bitField0_ & 0x00000010) == 0x00000010);
        }
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory snapshotCopy = 5;</code>
         */
        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory getSnapshotCopy() {
          if (snapshotCopyBuilder_ == null) {
            return snapshotCopy_;
          } else {
            return snapshotCopyBuilder_.getMessage();
          }
        }
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory snapshotCopy = 5;</code>
         */
        public Builder setSnapshotCopy(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory value) {
          if (snapshotCopyBuilder_ == null) {
            if (value == null) {
              throw new NullPointerException();
            }
            snapshotCopy_ = value;
            onChanged();
          } else {
            snapshotCopyBuilder_.setMessage(value);
          }
          bitField0_ |= 0x00000010;
          return this;
        }
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory snapshotCopy = 5;</code>
         */
        public Builder setSnapshotCopy(
            org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory.Builder builderForValue) {
          if (snapshotCopyBuilder_ == null) {
            snapshotCopy_ = builderForValue.build();
            onChanged();
          } else {
            snapshotCopyBuilder_.setMessage(builderForValue.build());
          }
          bitField0_ |= 0x00000010;
          return this;
        }
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory snapshotCopy = 5;</code>
         * Merges field-by-field when a value is already present; otherwise replaces.
         */
        public Builder mergeSnapshotCopy(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory value) {
          if (snapshotCopyBuilder_ == null) {
            if (((bitField0_ & 0x00000010) == 0x00000010) &&
                snapshotCopy_ != org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory.getDefaultInstance()) {
              snapshotCopy_ =
                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory.newBuilder(snapshotCopy_).mergeFrom(value).buildPartial();
            } else {
              snapshotCopy_ = value;
            }
            onChanged();
          } else {
            snapshotCopyBuilder_.mergeFrom(value);
          }
          bitField0_ |= 0x00000010;
          return this;
        }
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory snapshotCopy = 5;</code>
         */
        public Builder clearSnapshotCopy() {
          if (snapshotCopyBuilder_ == null) {
            snapshotCopy_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory.getDefaultInstance();
            onChanged();
          } else {
            snapshotCopyBuilder_.clear();
          }
          bitField0_ = (bitField0_ & ~0x00000010);
          return this;
        }
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory snapshotCopy = 5;</code>
         * Marks the field present and hands out a mutable nested builder.
         */
        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory.Builder getSnapshotCopyBuilder() {
          bitField0_ |= 0x00000010;
          onChanged();
          return getSnapshotCopyFieldBuilder().getBuilder();
        }
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory snapshotCopy = 5;</code>
         */
        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectoryOrBuilder getSnapshotCopyOrBuilder() {
          if (snapshotCopyBuilder_ != null) {
            return snapshotCopyBuilder_.getMessageOrBuilder();
          } else {
            return snapshotCopy_;
          }
        }
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory snapshotCopy = 5;</code>
         * Lazily creates the SingleFieldBuilder, transferring ownership of the
         * current inline value into it (snapshotCopy_ is nulled afterwards).
         */
        private com.google.protobuf.SingleFieldBuilder<
            org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectoryOrBuilder> 
            getSnapshotCopyFieldBuilder() {
          if (snapshotCopyBuilder_ == null) {
            snapshotCopyBuilder_ = new com.google.protobuf.SingleFieldBuilder<
                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectoryOrBuilder>(
                    snapshotCopy_,
                    getParentForChildren(),
                    isClean());
            snapshotCopy_ = null;
          }
          return snapshotCopyBuilder_;
        }
16606    
        // optional uint32 createdListSize = 6;
        // Presence is tracked by bit 0x00000020 of bitField0_.
        private int createdListSize_ ;
        /**
         * <code>optional uint32 createdListSize = 6;</code>
         */
        public boolean hasCreatedListSize() {
          return ((bitField0_ & 0x00000020) == 0x00000020);
        }
        /**
         * <code>optional uint32 createdListSize = 6;</code>
         */
        public int getCreatedListSize() {
          return createdListSize_;
        }
        /**
         * <code>optional uint32 createdListSize = 6;</code>
         */
        public Builder setCreatedListSize(int value) {
          bitField0_ |= 0x00000020;
          createdListSize_ = value;
          onChanged();
          return this;
        }
        /**
         * <code>optional uint32 createdListSize = 6;</code>
         */
        public Builder clearCreatedListSize() {
          bitField0_ = (bitField0_ & ~0x00000020);
          createdListSize_ = 0;
          onChanged();
          return this;
        }
16639    
        // repeated uint64 deletedINode = 7 [packed = true];
        // Copy-on-write list: starts as the shared immutable emptyList (or a
        // list adopted from another message in mergeFrom) and is copied into a
        // private ArrayList on first mutation; bit 0x40 records ownership.
        private java.util.List<java.lang.Long> deletedINode_ = java.util.Collections.emptyList();
        private void ensureDeletedINodeIsMutable() {
          if (!((bitField0_ & 0x00000040) == 0x00000040)) {
            deletedINode_ = new java.util.ArrayList<java.lang.Long>(deletedINode_);
            bitField0_ |= 0x00000040;
           }
        }
        /**
         * <code>repeated uint64 deletedINode = 7 [packed = true];</code>
         *
         * <pre>
         * id of deleted inodes
         * </pre>
         */
        public java.util.List<java.lang.Long>
            getDeletedINodeList() {
          return java.util.Collections.unmodifiableList(deletedINode_);
        }
        /**
         * <code>repeated uint64 deletedINode = 7 [packed = true];</code>
         *
         * <pre>
         * id of deleted inodes
         * </pre>
         */
        public int getDeletedINodeCount() {
          return deletedINode_.size();
        }
        /**
         * <code>repeated uint64 deletedINode = 7 [packed = true];</code>
         *
         * <pre>
         * id of deleted inodes
         * </pre>
         */
        public long getDeletedINode(int index) {
          return deletedINode_.get(index);
        }
        /**
         * <code>repeated uint64 deletedINode = 7 [packed = true];</code>
         *
         * <pre>
         * id of deleted inodes
         * </pre>
         */
        public Builder setDeletedINode(
            int index, long value) {
          ensureDeletedINodeIsMutable();
          deletedINode_.set(index, value);
          onChanged();
          return this;
        }
        /**
         * <code>repeated uint64 deletedINode = 7 [packed = true];</code>
         *
         * <pre>
         * id of deleted inodes
         * </pre>
         */
        public Builder addDeletedINode(long value) {
          ensureDeletedINodeIsMutable();
          deletedINode_.add(value);
          onChanged();
          return this;
        }
        /**
         * <code>repeated uint64 deletedINode = 7 [packed = true];</code>
         *
         * <pre>
         * id of deleted inodes
         * </pre>
         */
        public Builder addAllDeletedINode(
            java.lang.Iterable<? extends java.lang.Long> values) {
          ensureDeletedINodeIsMutable();
          super.addAll(values, deletedINode_);
          onChanged();
          return this;
        }
        /**
         * <code>repeated uint64 deletedINode = 7 [packed = true];</code>
         *
         * <pre>
         * id of deleted inodes
         * </pre>
         */
        public Builder clearDeletedINode() {
          deletedINode_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000040);
          onChanged();
          return this;
        }
16733    
        // repeated uint32 deletedINodeRef = 8 [packed = true];
        // Copy-on-write list like deletedINode_ above; bit 0x80 records ownership.
        private java.util.List<java.lang.Integer> deletedINodeRef_ = java.util.Collections.emptyList();
        private void ensureDeletedINodeRefIsMutable() {
          if (!((bitField0_ & 0x00000080) == 0x00000080)) {
            deletedINodeRef_ = new java.util.ArrayList<java.lang.Integer>(deletedINodeRef_);
            bitField0_ |= 0x00000080;
           }
        }
        /**
         * <code>repeated uint32 deletedINodeRef = 8 [packed = true];</code>
         *
         * <pre>
         * id of reference nodes in the deleted list
         * </pre>
         */
        public java.util.List<java.lang.Integer>
            getDeletedINodeRefList() {
          return java.util.Collections.unmodifiableList(deletedINodeRef_);
        }
        /**
         * <code>repeated uint32 deletedINodeRef = 8 [packed = true];</code>
         *
         * <pre>
         * id of reference nodes in the deleted list
         * </pre>
         */
        public int getDeletedINodeRefCount() {
          return deletedINodeRef_.size();
        }
        /**
         * <code>repeated uint32 deletedINodeRef = 8 [packed = true];</code>
         *
         * <pre>
         * id of reference nodes in the deleted list
         * </pre>
         */
        public int getDeletedINodeRef(int index) {
          return deletedINodeRef_.get(index);
        }
        /**
         * <code>repeated uint32 deletedINodeRef = 8 [packed = true];</code>
         *
         * <pre>
         * id of reference nodes in the deleted list
         * </pre>
         */
        public Builder setDeletedINodeRef(
            int index, int value) {
          ensureDeletedINodeRefIsMutable();
          deletedINodeRef_.set(index, value);
          onChanged();
          return this;
        }
        /**
         * <code>repeated uint32 deletedINodeRef = 8 [packed = true];</code>
         *
         * <pre>
         * id of reference nodes in the deleted list
         * </pre>
         */
        public Builder addDeletedINodeRef(int value) {
          ensureDeletedINodeRefIsMutable();
          deletedINodeRef_.add(value);
          onChanged();
          return this;
        }
        /**
         * <code>repeated uint32 deletedINodeRef = 8 [packed = true];</code>
         *
         * <pre>
         * id of reference nodes in the deleted list
         * </pre>
         */
        public Builder addAllDeletedINodeRef(
            java.lang.Iterable<? extends java.lang.Integer> values) {
          ensureDeletedINodeRefIsMutable();
          super.addAll(values, deletedINodeRef_);
          onChanged();
          return this;
        }
        /**
         * <code>repeated uint32 deletedINodeRef = 8 [packed = true];</code>
         *
         * <pre>
         * id of reference nodes in the deleted list
         * </pre>
         */
        public Builder clearDeletedINodeRef() {
          deletedINodeRef_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000080);
          onChanged();
          return this;
        }
16827    
16828            // @@protoc_insertion_point(builder_scope:hadoop.hdfs.fsimage.SnapshotDiffSection.DirectoryDiff)
16829          }
16830    
      // Eagerly builds the shared default instance via the no-init constructor,
      // then sets every field to its proto default.
      static {
        defaultInstance = new DirectoryDiff(true);
        defaultInstance.initFields();
      }
16835    
16836          // @@protoc_insertion_point(class_scope:hadoop.hdfs.fsimage.SnapshotDiffSection.DirectoryDiff)
16837        }
16838    
    /**
     * Read-only accessor surface implemented by both {@code FileDiff} and its
     * Builder: presence checks plus getters for each of the four fields.
     */
    public interface FileDiffOrBuilder
        extends com.google.protobuf.MessageOrBuilder {

      // optional uint32 snapshotId = 1;
      /**
       * <code>optional uint32 snapshotId = 1;</code>
       */
      boolean hasSnapshotId();
      /**
       * <code>optional uint32 snapshotId = 1;</code>
       */
      int getSnapshotId();

      // optional uint64 fileSize = 2;
      /**
       * <code>optional uint64 fileSize = 2;</code>
       */
      boolean hasFileSize();
      /**
       * <code>optional uint64 fileSize = 2;</code>
       */
      long getFileSize();

      // optional bytes name = 3;
      /**
       * <code>optional bytes name = 3;</code>
       */
      boolean hasName();
      /**
       * <code>optional bytes name = 3;</code>
       */
      com.google.protobuf.ByteString getName();

      // optional .hadoop.hdfs.fsimage.INodeSection.INodeFile snapshotCopy = 4;
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeFile snapshotCopy = 4;</code>
       */
      boolean hasSnapshotCopy();
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeFile snapshotCopy = 4;</code>
       */
      org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile getSnapshotCopy();
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeFile snapshotCopy = 4;</code>
       */
      org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFileOrBuilder getSnapshotCopyOrBuilder();
    }
16886        /**
16887         * Protobuf type {@code hadoop.hdfs.fsimage.SnapshotDiffSection.FileDiff}
16888         */
16889        public static final class FileDiff extends
16890            com.google.protobuf.GeneratedMessage
16891            implements FileDiffOrBuilder {
      // Use FileDiff.newBuilder() to construct.
      private FileDiff(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
        super(builder);
        this.unknownFields = builder.getUnknownFields();
      }
      // No-init constructor, used only to create the shared default instance.
      private FileDiff(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
16898    
      // Shared immutable default instance; assigned in the class's static initializer.
      private static final FileDiff defaultInstance;
      public static FileDiff getDefaultInstance() {
        return defaultInstance;
      }

      public FileDiff getDefaultInstanceForType() {
        return defaultInstance;
      }
16907    
      // Fields read off the wire whose tags this schema version does not know.
      private final com.google.protobuf.UnknownFieldSet unknownFields;
      @java.lang.Override
      public final com.google.protobuf.UnknownFieldSet
          getUnknownFields() {
        return this.unknownFields;
      }
      /**
       * Wire-format parsing constructor: reads tag/value pairs until end of
       * stream (tag 0) or an unparseable tag; unrecognized tags are preserved
       * in unknownFields rather than dropped.
       */
      private FileDiff(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        initFields();
        int mutable_bitField0_ = 0;
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
            com.google.protobuf.UnknownFieldSet.newBuilder();
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            // Tag values are field_number << 3 | wire_type.
            switch (tag) {
              case 0:
                done = true;
                break;
              default: {
                if (!parseUnknownField(input, unknownFields,
                                       extensionRegistry, tag)) {
                  done = true;
                }
                break;
              }
              case 8: {
                bitField0_ |= 0x00000001;
                snapshotId_ = input.readUInt32();
                break;
              }
              case 16: {
                bitField0_ |= 0x00000002;
                fileSize_ = input.readUInt64();
                break;
              }
              case 26: {
                bitField0_ |= 0x00000004;
                name_ = input.readBytes();
                break;
              }
              case 34: {
                // If snapshotCopy was already seen, merge the new value into it.
                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile.Builder subBuilder = null;
                if (((bitField0_ & 0x00000008) == 0x00000008)) {
                  subBuilder = snapshotCopy_.toBuilder();
                }
                snapshotCopy_ = input.readMessage(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile.PARSER, extensionRegistry);
                if (subBuilder != null) {
                  subBuilder.mergeFrom(snapshotCopy_);
                  snapshotCopy_ = subBuilder.buildPartial();
                }
                bitField0_ |= 0x00000008;
                break;
              }
            }
          }
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(this);
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(
              e.getMessage()).setUnfinishedMessage(this);
        } finally {
          // Whatever was parsed so far is kept even when an exception is thrown.
          this.unknownFields = unknownFields.build();
          makeExtensionsImmutable();
        }
      }
      // Reflection support: descriptor and field-accessor table generated for
      // hadoop.hdfs.fsimage.SnapshotDiffSection.FileDiff.
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_FileDiff_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_FileDiff_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.FileDiff.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.FileDiff.Builder.class);
      }
16988    
      // Stateless parser instance; delegates to the parsing constructor above.
      public static com.google.protobuf.Parser<FileDiff> PARSER =
          new com.google.protobuf.AbstractParser<FileDiff>() {
        public FileDiff parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          return new FileDiff(input, extensionRegistry);
        }
      };

      @java.lang.Override
      public com.google.protobuf.Parser<FileDiff> getParserForType() {
        return PARSER;
      }
17003    
      // Presence bits for the message's four optional fields.
      private int bitField0_;
      // optional uint32 snapshotId = 1;
      public static final int SNAPSHOTID_FIELD_NUMBER = 1;
      private int snapshotId_;
      /**
       * <code>optional uint32 snapshotId = 1;</code>
       */
      public boolean hasSnapshotId() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>optional uint32 snapshotId = 1;</code>
       */
      public int getSnapshotId() {
        return snapshotId_;
      }
17020    
      // optional uint64 fileSize = 2;
      // Presence is tracked by bit 0x00000002 of bitField0_.
      public static final int FILESIZE_FIELD_NUMBER = 2;
      private long fileSize_;
      /**
       * <code>optional uint64 fileSize = 2;</code>
       */
      public boolean hasFileSize() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>optional uint64 fileSize = 2;</code>
       */
      public long getFileSize() {
        return fileSize_;
      }
17036    
      // optional bytes name = 3;
      // Presence is tracked by bit 0x00000004 of bitField0_.
      public static final int NAME_FIELD_NUMBER = 3;
      private com.google.protobuf.ByteString name_;
      /**
       * <code>optional bytes name = 3;</code>
       */
      public boolean hasName() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      /**
       * <code>optional bytes name = 3;</code>
       */
      public com.google.protobuf.ByteString getName() {
        return name_;
      }
17052    
      // optional .hadoop.hdfs.fsimage.INodeSection.INodeFile snapshotCopy = 4;
      // Presence is tracked by bit 0x00000008 of bitField0_.
      public static final int SNAPSHOTCOPY_FIELD_NUMBER = 4;
      private org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile snapshotCopy_;
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeFile snapshotCopy = 4;</code>
       */
      public boolean hasSnapshotCopy() {
        return ((bitField0_ & 0x00000008) == 0x00000008);
      }
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeFile snapshotCopy = 4;</code>
       */
      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile getSnapshotCopy() {
        return snapshotCopy_;
      }
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeFile snapshotCopy = 4;</code>
       */
      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFileOrBuilder getSnapshotCopyOrBuilder() {
        return snapshotCopy_;
      }
17074    
17075          private void initFields() {
17076            snapshotId_ = 0;
17077            fileSize_ = 0L;
17078            name_ = com.google.protobuf.ByteString.EMPTY;
17079            snapshotCopy_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile.getDefaultInstance();
17080          }
17081          private byte memoizedIsInitialized = -1;
17082          public final boolean isInitialized() {
17083            byte isInitialized = memoizedIsInitialized;
17084            if (isInitialized != -1) return isInitialized == 1;
17085    
17086            if (hasSnapshotCopy()) {
17087              if (!getSnapshotCopy().isInitialized()) {
17088                memoizedIsInitialized = 0;
17089                return false;
17090              }
17091            }
17092            memoizedIsInitialized = 1;
17093            return true;
17094          }
17095    
17096          public void writeTo(com.google.protobuf.CodedOutputStream output)
17097                              throws java.io.IOException {
17098            getSerializedSize();
17099            if (((bitField0_ & 0x00000001) == 0x00000001)) {
17100              output.writeUInt32(1, snapshotId_);
17101            }
17102            if (((bitField0_ & 0x00000002) == 0x00000002)) {
17103              output.writeUInt64(2, fileSize_);
17104            }
17105            if (((bitField0_ & 0x00000004) == 0x00000004)) {
17106              output.writeBytes(3, name_);
17107            }
17108            if (((bitField0_ & 0x00000008) == 0x00000008)) {
17109              output.writeMessage(4, snapshotCopy_);
17110            }
17111            getUnknownFields().writeTo(output);
17112          }
17113    
17114          private int memoizedSerializedSize = -1;
17115          public int getSerializedSize() {
17116            int size = memoizedSerializedSize;
17117            if (size != -1) return size;
17118    
17119            size = 0;
17120            if (((bitField0_ & 0x00000001) == 0x00000001)) {
17121              size += com.google.protobuf.CodedOutputStream
17122                .computeUInt32Size(1, snapshotId_);
17123            }
17124            if (((bitField0_ & 0x00000002) == 0x00000002)) {
17125              size += com.google.protobuf.CodedOutputStream
17126                .computeUInt64Size(2, fileSize_);
17127            }
17128            if (((bitField0_ & 0x00000004) == 0x00000004)) {
17129              size += com.google.protobuf.CodedOutputStream
17130                .computeBytesSize(3, name_);
17131            }
17132            if (((bitField0_ & 0x00000008) == 0x00000008)) {
17133              size += com.google.protobuf.CodedOutputStream
17134                .computeMessageSize(4, snapshotCopy_);
17135            }
17136            size += getUnknownFields().getSerializedSize();
17137            memoizedSerializedSize = size;
17138            return size;
17139          }
17140    
      private static final long serialVersionUID = 0L;
      /** Delegates Java serialization to the {@code GeneratedMessage} serialization proxy. */
      @java.lang.Override
      protected java.lang.Object writeReplace()
          throws java.io.ObjectStreamException {
        return super.writeReplace();
      }
17147    
      // Static parse entry points; all overloads delegate to the PARSER singleton.
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.FileDiff parseFrom(
          com.google.protobuf.ByteString data)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.FileDiff parseFrom(
          com.google.protobuf.ByteString data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.FileDiff parseFrom(byte[] data)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.FileDiff parseFrom(
          byte[] data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.FileDiff parseFrom(java.io.InputStream input)
          throws java.io.IOException {
        return PARSER.parseFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.FileDiff parseFrom(
          java.io.InputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseFrom(input, extensionRegistry);
      }
      // Delimited variants read a varint length prefix before the message body.
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.FileDiff parseDelimitedFrom(java.io.InputStream input)
          throws java.io.IOException {
        return PARSER.parseDelimitedFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.FileDiff parseDelimitedFrom(
          java.io.InputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseDelimitedFrom(input, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.FileDiff parseFrom(
          com.google.protobuf.CodedInputStream input)
          throws java.io.IOException {
        return PARSER.parseFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.FileDiff parseFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseFrom(input, extensionRegistry);
      }
17200    
      /** Creates an empty builder for {@code FileDiff}. */
      public static Builder newBuilder() { return Builder.create(); }
      public Builder newBuilderForType() { return newBuilder(); }
      /** Creates a builder pre-populated with a copy of {@code prototype}'s fields. */
      public static Builder newBuilder(org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.FileDiff prototype) {
        return newBuilder().mergeFrom(prototype);
      }
      public Builder toBuilder() { return newBuilder(this); }

      @java.lang.Override
      protected Builder newBuilderForType(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        Builder builder = new Builder(parent);
        return builder;
      }
17214          /**
17215           * Protobuf type {@code hadoop.hdfs.fsimage.SnapshotDiffSection.FileDiff}
17216           */
17217          public static final class Builder extends
17218              com.google.protobuf.GeneratedMessage.Builder<Builder>
17219             implements org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.FileDiffOrBuilder {
17220            public static final com.google.protobuf.Descriptors.Descriptor
17221                getDescriptor() {
17222              return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_FileDiff_descriptor;
17223            }
17224    
17225            protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
17226                internalGetFieldAccessorTable() {
17227              return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_FileDiff_fieldAccessorTable
17228                  .ensureFieldAccessorsInitialized(
17229                      org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.FileDiff.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.FileDiff.Builder.class);
17230            }
17231    
17232            // Construct using org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.FileDiff.newBuilder()
17233            private Builder() {
17234              maybeForceBuilderInitialization();
17235            }
17236    
17237            private Builder(
17238                com.google.protobuf.GeneratedMessage.BuilderParent parent) {
17239              super(parent);
17240              maybeForceBuilderInitialization();
17241            }
17242            private void maybeForceBuilderInitialization() {
17243              if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
17244                getSnapshotCopyFieldBuilder();
17245              }
17246            }
17247            private static Builder create() {
17248              return new Builder();
17249            }
17250    
17251            public Builder clear() {
17252              super.clear();
17253              snapshotId_ = 0;
17254              bitField0_ = (bitField0_ & ~0x00000001);
17255              fileSize_ = 0L;
17256              bitField0_ = (bitField0_ & ~0x00000002);
17257              name_ = com.google.protobuf.ByteString.EMPTY;
17258              bitField0_ = (bitField0_ & ~0x00000004);
17259              if (snapshotCopyBuilder_ == null) {
17260                snapshotCopy_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile.getDefaultInstance();
17261              } else {
17262                snapshotCopyBuilder_.clear();
17263              }
17264              bitField0_ = (bitField0_ & ~0x00000008);
17265              return this;
17266            }
17267    
17268            public Builder clone() {
17269              return create().mergeFrom(buildPartial());
17270            }
17271    
17272            public com.google.protobuf.Descriptors.Descriptor
17273                getDescriptorForType() {
17274              return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_FileDiff_descriptor;
17275            }
17276    
17277            public org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.FileDiff getDefaultInstanceForType() {
17278              return org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.FileDiff.getDefaultInstance();
17279            }
17280    
17281            public org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.FileDiff build() {
17282              org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.FileDiff result = buildPartial();
17283              if (!result.isInitialized()) {
17284                throw newUninitializedMessageException(result);
17285              }
17286              return result;
17287            }
17288    
17289            public org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.FileDiff buildPartial() {
17290              org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.FileDiff result = new org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.FileDiff(this);
17291              int from_bitField0_ = bitField0_;
17292              int to_bitField0_ = 0;
17293              if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
17294                to_bitField0_ |= 0x00000001;
17295              }
17296              result.snapshotId_ = snapshotId_;
17297              if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
17298                to_bitField0_ |= 0x00000002;
17299              }
17300              result.fileSize_ = fileSize_;
17301              if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
17302                to_bitField0_ |= 0x00000004;
17303              }
17304              result.name_ = name_;
17305              if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
17306                to_bitField0_ |= 0x00000008;
17307              }
17308              if (snapshotCopyBuilder_ == null) {
17309                result.snapshotCopy_ = snapshotCopy_;
17310              } else {
17311                result.snapshotCopy_ = snapshotCopyBuilder_.build();
17312              }
17313              result.bitField0_ = to_bitField0_;
17314              onBuilt();
17315              return result;
17316            }
17317    
17318            public Builder mergeFrom(com.google.protobuf.Message other) {
17319              if (other instanceof org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.FileDiff) {
17320                return mergeFrom((org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.FileDiff)other);
17321              } else {
17322                super.mergeFrom(other);
17323                return this;
17324              }
17325            }
17326    
17327            public Builder mergeFrom(org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.FileDiff other) {
17328              if (other == org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.FileDiff.getDefaultInstance()) return this;
17329              if (other.hasSnapshotId()) {
17330                setSnapshotId(other.getSnapshotId());
17331              }
17332              if (other.hasFileSize()) {
17333                setFileSize(other.getFileSize());
17334              }
17335              if (other.hasName()) {
17336                setName(other.getName());
17337              }
17338              if (other.hasSnapshotCopy()) {
17339                mergeSnapshotCopy(other.getSnapshotCopy());
17340              }
17341              this.mergeUnknownFields(other.getUnknownFields());
17342              return this;
17343            }
17344    
17345            public final boolean isInitialized() {
17346              if (hasSnapshotCopy()) {
17347                if (!getSnapshotCopy().isInitialized()) {
17348                  
17349                  return false;
17350                }
17351              }
17352              return true;
17353            }
17354    
17355            public Builder mergeFrom(
17356                com.google.protobuf.CodedInputStream input,
17357                com.google.protobuf.ExtensionRegistryLite extensionRegistry)
17358                throws java.io.IOException {
17359              org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.FileDiff parsedMessage = null;
17360              try {
17361                parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
17362              } catch (com.google.protobuf.InvalidProtocolBufferException e) {
17363                parsedMessage = (org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.FileDiff) e.getUnfinishedMessage();
17364                throw e;
17365              } finally {
17366                if (parsedMessage != null) {
17367                  mergeFrom(parsedMessage);
17368                }
17369              }
17370              return this;
17371            }
17372            private int bitField0_;
17373    
17374            // optional uint32 snapshotId = 1;
17375            private int snapshotId_ ;
17376            /**
17377             * <code>optional uint32 snapshotId = 1;</code>
17378             */
17379            public boolean hasSnapshotId() {
17380              return ((bitField0_ & 0x00000001) == 0x00000001);
17381            }
17382            /**
17383             * <code>optional uint32 snapshotId = 1;</code>
17384             */
17385            public int getSnapshotId() {
17386              return snapshotId_;
17387            }
17388            /**
17389             * <code>optional uint32 snapshotId = 1;</code>
17390             */
17391            public Builder setSnapshotId(int value) {
17392              bitField0_ |= 0x00000001;
17393              snapshotId_ = value;
17394              onChanged();
17395              return this;
17396            }
17397            /**
17398             * <code>optional uint32 snapshotId = 1;</code>
17399             */
17400            public Builder clearSnapshotId() {
17401              bitField0_ = (bitField0_ & ~0x00000001);
17402              snapshotId_ = 0;
17403              onChanged();
17404              return this;
17405            }
17406    
17407            // optional uint64 fileSize = 2;
17408            private long fileSize_ ;
17409            /**
17410             * <code>optional uint64 fileSize = 2;</code>
17411             */
17412            public boolean hasFileSize() {
17413              return ((bitField0_ & 0x00000002) == 0x00000002);
17414            }
17415            /**
17416             * <code>optional uint64 fileSize = 2;</code>
17417             */
17418            public long getFileSize() {
17419              return fileSize_;
17420            }
17421            /**
17422             * <code>optional uint64 fileSize = 2;</code>
17423             */
17424            public Builder setFileSize(long value) {
17425              bitField0_ |= 0x00000002;
17426              fileSize_ = value;
17427              onChanged();
17428              return this;
17429            }
17430            /**
17431             * <code>optional uint64 fileSize = 2;</code>
17432             */
17433            public Builder clearFileSize() {
17434              bitField0_ = (bitField0_ & ~0x00000002);
17435              fileSize_ = 0L;
17436              onChanged();
17437              return this;
17438            }
17439    
17440            // optional bytes name = 3;
17441            private com.google.protobuf.ByteString name_ = com.google.protobuf.ByteString.EMPTY;
17442            /**
17443             * <code>optional bytes name = 3;</code>
17444             */
17445            public boolean hasName() {
17446              return ((bitField0_ & 0x00000004) == 0x00000004);
17447            }
17448            /**
17449             * <code>optional bytes name = 3;</code>
17450             */
17451            public com.google.protobuf.ByteString getName() {
17452              return name_;
17453            }
17454            /**
17455             * <code>optional bytes name = 3;</code>
17456             */
17457            public Builder setName(com.google.protobuf.ByteString value) {
17458              if (value == null) {
17459        throw new NullPointerException();
17460      }
17461      bitField0_ |= 0x00000004;
17462              name_ = value;
17463              onChanged();
17464              return this;
17465            }
17466            /**
17467             * <code>optional bytes name = 3;</code>
17468             */
17469            public Builder clearName() {
17470              bitField0_ = (bitField0_ & ~0x00000004);
17471              name_ = getDefaultInstance().getName();
17472              onChanged();
17473              return this;
17474            }
17475    
17476            // optional .hadoop.hdfs.fsimage.INodeSection.INodeFile snapshotCopy = 4;
17477            private org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile snapshotCopy_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile.getDefaultInstance();
17478            private com.google.protobuf.SingleFieldBuilder<
17479                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFileOrBuilder> snapshotCopyBuilder_;
17480            /**
17481             * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeFile snapshotCopy = 4;</code>
17482             */
17483            public boolean hasSnapshotCopy() {
17484              return ((bitField0_ & 0x00000008) == 0x00000008);
17485            }
17486            /**
17487             * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeFile snapshotCopy = 4;</code>
17488             */
17489            public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile getSnapshotCopy() {
17490              if (snapshotCopyBuilder_ == null) {
17491                return snapshotCopy_;
17492              } else {
17493                return snapshotCopyBuilder_.getMessage();
17494              }
17495            }
17496            /**
17497             * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeFile snapshotCopy = 4;</code>
17498             */
17499            public Builder setSnapshotCopy(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile value) {
17500              if (snapshotCopyBuilder_ == null) {
17501                if (value == null) {
17502                  throw new NullPointerException();
17503                }
17504                snapshotCopy_ = value;
17505                onChanged();
17506              } else {
17507                snapshotCopyBuilder_.setMessage(value);
17508              }
17509              bitField0_ |= 0x00000008;
17510              return this;
17511            }
17512            /**
17513             * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeFile snapshotCopy = 4;</code>
17514             */
17515            public Builder setSnapshotCopy(
17516                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile.Builder builderForValue) {
17517              if (snapshotCopyBuilder_ == null) {
17518                snapshotCopy_ = builderForValue.build();
17519                onChanged();
17520              } else {
17521                snapshotCopyBuilder_.setMessage(builderForValue.build());
17522              }
17523              bitField0_ |= 0x00000008;
17524              return this;
17525            }
17526            /**
17527             * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeFile snapshotCopy = 4;</code>
17528             */
17529            public Builder mergeSnapshotCopy(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile value) {
17530              if (snapshotCopyBuilder_ == null) {
17531                if (((bitField0_ & 0x00000008) == 0x00000008) &&
17532                    snapshotCopy_ != org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile.getDefaultInstance()) {
17533                  snapshotCopy_ =
17534                    org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile.newBuilder(snapshotCopy_).mergeFrom(value).buildPartial();
17535                } else {
17536                  snapshotCopy_ = value;
17537                }
17538                onChanged();
17539              } else {
17540                snapshotCopyBuilder_.mergeFrom(value);
17541              }
17542              bitField0_ |= 0x00000008;
17543              return this;
17544            }
17545            /**
17546             * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeFile snapshotCopy = 4;</code>
17547             */
17548            public Builder clearSnapshotCopy() {
17549              if (snapshotCopyBuilder_ == null) {
17550                snapshotCopy_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile.getDefaultInstance();
17551                onChanged();
17552              } else {
17553                snapshotCopyBuilder_.clear();
17554              }
17555              bitField0_ = (bitField0_ & ~0x00000008);
17556              return this;
17557            }
17558            /**
17559             * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeFile snapshotCopy = 4;</code>
17560             */
17561            public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile.Builder getSnapshotCopyBuilder() {
17562              bitField0_ |= 0x00000008;
17563              onChanged();
17564              return getSnapshotCopyFieldBuilder().getBuilder();
17565            }
17566            /**
17567             * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeFile snapshotCopy = 4;</code>
17568             */
17569            public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFileOrBuilder getSnapshotCopyOrBuilder() {
17570              if (snapshotCopyBuilder_ != null) {
17571                return snapshotCopyBuilder_.getMessageOrBuilder();
17572              } else {
17573                return snapshotCopy_;
17574              }
17575            }
17576            /**
17577             * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeFile snapshotCopy = 4;</code>
17578             */
17579            private com.google.protobuf.SingleFieldBuilder<
17580                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFileOrBuilder> 
17581                getSnapshotCopyFieldBuilder() {
17582              if (snapshotCopyBuilder_ == null) {
17583                snapshotCopyBuilder_ = new com.google.protobuf.SingleFieldBuilder<
17584                    org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFileOrBuilder>(
17585                        snapshotCopy_,
17586                        getParentForChildren(),
17587                        isClean());
17588                snapshotCopy_ = null;
17589              }
17590              return snapshotCopyBuilder_;
17591            }
17592    
17593            // @@protoc_insertion_point(builder_scope:hadoop.hdfs.fsimage.SnapshotDiffSection.FileDiff)
17594          }
17595    
      // Create and initialize the shared default instance once, at class-load time.
      static {
        defaultInstance = new FileDiff(true);
        defaultInstance.initFields();
      }
17600    
17601          // @@protoc_insertion_point(class_scope:hadoop.hdfs.fsimage.SnapshotDiffSection.FileDiff)
17602        }
17603    
    /** Read-only accessor interface shared by {@code DiffEntry} and its builder. */
    public interface DiffEntryOrBuilder
        extends com.google.protobuf.MessageOrBuilder {

      // required .hadoop.hdfs.fsimage.SnapshotDiffSection.DiffEntry.Type type = 1;
      /**
       * <code>required .hadoop.hdfs.fsimage.SnapshotDiffSection.DiffEntry.Type type = 1;</code>
       */
      boolean hasType();
      /**
       * <code>required .hadoop.hdfs.fsimage.SnapshotDiffSection.DiffEntry.Type type = 1;</code>
       */
      org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry.Type getType();

      // optional uint64 inodeId = 2;
      /**
       * <code>optional uint64 inodeId = 2;</code>
       */
      boolean hasInodeId();
      /**
       * <code>optional uint64 inodeId = 2;</code>
       */
      long getInodeId();

      // optional uint32 numOfDiff = 3;
      /**
       * <code>optional uint32 numOfDiff = 3;</code>
       */
      boolean hasNumOfDiff();
      /**
       * <code>optional uint32 numOfDiff = 3;</code>
       */
      int getNumOfDiff();
    }
17637        /**
17638         * Protobuf type {@code hadoop.hdfs.fsimage.SnapshotDiffSection.DiffEntry}
17639         */
17640        public static final class DiffEntry extends
17641            com.google.protobuf.GeneratedMessage
17642            implements DiffEntryOrBuilder {
      // Use DiffEntry.newBuilder() to construct.
      /** Builder-based constructor; captures the builder's unknown fields. */
      private DiffEntry(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
        super(builder);
        this.unknownFields = builder.getUnknownFields();
      }
      // noInit constructor: used only for the shared default instance (no field setup here).
      private DiffEntry(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
17649    
      // Shared immutable default instance, assigned in the static initializer.
      private static final DiffEntry defaultInstance;
      public static DiffEntry getDefaultInstance() {
        return defaultInstance;
      }

      public DiffEntry getDefaultInstanceForType() {
        return defaultInstance;
      }
17658    
      // Fields present on the wire but not defined in this message's schema.
      private final com.google.protobuf.UnknownFieldSet unknownFields;
      @java.lang.Override
      public final com.google.protobuf.UnknownFieldSet
          getUnknownFields() {
        return this.unknownFields;
      }
      /**
       * Wire-format parsing constructor: reads tagged fields from {@code input}
       * until end-of-message, preserving anything unrecognized in unknownFields.
       * Tag values are (field_number << 3) | wire_type: 8 = type (varint enum),
       * 16 = inodeId (varint), 24 = numOfDiff (varint).
       */
      private DiffEntry(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        initFields();
        int mutable_bitField0_ = 0;
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
            com.google.protobuf.UnknownFieldSet.newBuilder();
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            // NOTE: generated code places `default` before the numbered cases;
            // Java switch semantics make the ordering irrelevant.
            switch (tag) {
              case 0:
                done = true;
                break;
              default: {
                if (!parseUnknownField(input, unknownFields,
                                       extensionRegistry, tag)) {
                  done = true;
                }
                break;
              }
              case 8: {
                int rawValue = input.readEnum();
                org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry.Type value = org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry.Type.valueOf(rawValue);
                if (value == null) {
                  // Unrecognized enum number: keep it as an unknown varint field.
                  unknownFields.mergeVarintField(1, rawValue);
                } else {
                  bitField0_ |= 0x00000001;
                  type_ = value;
                }
                break;
              }
              case 16: {
                bitField0_ |= 0x00000002;
                inodeId_ = input.readUInt64();
                break;
              }
              case 24: {
                bitField0_ |= 0x00000004;
                numOfDiff_ = input.readUInt32();
                break;
              }
            }
          }
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(this);
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(
              e.getMessage()).setUnfinishedMessage(this);
        } finally {
          // Always finalize unknown fields, even if parsing failed part-way.
          this.unknownFields = unknownFields.build();
          makeExtensionsImmutable();
        }
      }
      /** Returns the protobuf descriptor for this message type. */
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_DiffEntry_descriptor;
      }

      // Wires the reflective field accessors used by GeneratedMessage.
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_DiffEntry_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry.Builder.class);
      }
17732    
17733          public static com.google.protobuf.Parser<DiffEntry> PARSER =
17734              new com.google.protobuf.AbstractParser<DiffEntry>() {
17735            public DiffEntry parsePartialFrom(
17736                com.google.protobuf.CodedInputStream input,
17737                com.google.protobuf.ExtensionRegistryLite extensionRegistry)
17738                throws com.google.protobuf.InvalidProtocolBufferException {
17739              return new DiffEntry(input, extensionRegistry);
17740            }
17741          };
17742    
17743          @java.lang.Override
17744          public com.google.protobuf.Parser<DiffEntry> getParserForType() {
17745            return PARSER;
17746          }
17747    
17748          /**
17749           * Protobuf enum {@code hadoop.hdfs.fsimage.SnapshotDiffSection.DiffEntry.Type}
17750           */
17751          public enum Type
17752              implements com.google.protobuf.ProtocolMessageEnum {
17753            /**
17754             * <code>FILEDIFF = 1;</code>
17755             */
17756            FILEDIFF(0, 1),
17757            /**
17758             * <code>DIRECTORYDIFF = 2;</code>
17759             */
17760            DIRECTORYDIFF(1, 2),
17761            ;
17762    
17763            /**
17764             * <code>FILEDIFF = 1;</code>
17765             */
17766            public static final int FILEDIFF_VALUE = 1;
17767            /**
17768             * <code>DIRECTORYDIFF = 2;</code>
17769             */
17770            public static final int DIRECTORYDIFF_VALUE = 2;
17771    
17772    
17773            public final int getNumber() { return value; }
17774    
17775            public static Type valueOf(int value) {
17776              switch (value) {
17777                case 1: return FILEDIFF;
17778                case 2: return DIRECTORYDIFF;
17779                default: return null;
17780              }
17781            }
17782    
17783            public static com.google.protobuf.Internal.EnumLiteMap<Type>
17784                internalGetValueMap() {
17785              return internalValueMap;
17786            }
17787            private static com.google.protobuf.Internal.EnumLiteMap<Type>
17788                internalValueMap =
17789                  new com.google.protobuf.Internal.EnumLiteMap<Type>() {
17790                    public Type findValueByNumber(int number) {
17791                      return Type.valueOf(number);
17792                    }
17793                  };
17794    
17795            public final com.google.protobuf.Descriptors.EnumValueDescriptor
17796                getValueDescriptor() {
17797              return getDescriptor().getValues().get(index);
17798            }
17799            public final com.google.protobuf.Descriptors.EnumDescriptor
17800                getDescriptorForType() {
17801              return getDescriptor();
17802            }
17803            public static final com.google.protobuf.Descriptors.EnumDescriptor
17804                getDescriptor() {
17805              return org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry.getDescriptor().getEnumTypes().get(0);
17806            }
17807    
17808            private static final Type[] VALUES = values();
17809    
17810            public static Type valueOf(
17811                com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
17812              if (desc.getType() != getDescriptor()) {
17813                throw new java.lang.IllegalArgumentException(
17814                  "EnumValueDescriptor is not for this type.");
17815              }
17816              return VALUES[desc.getIndex()];
17817            }
17818    
17819            private final int index;
17820            private final int value;
17821    
17822            private Type(int index, int value) {
17823              this.index = index;
17824              this.value = value;
17825            }
17826    
17827            // @@protoc_insertion_point(enum_scope:hadoop.hdfs.fsimage.SnapshotDiffSection.DiffEntry.Type)
17828          }
17829    
      // Presence bitmap: bit 0 = type, bit 1 = inodeId, bit 2 = numOfDiff
      // (proto2 explicit field presence).
      private int bitField0_;
      // required .hadoop.hdfs.fsimage.SnapshotDiffSection.DiffEntry.Type type = 1;
      public static final int TYPE_FIELD_NUMBER = 1;
      private org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry.Type type_;
      /**
       * <code>required .hadoop.hdfs.fsimage.SnapshotDiffSection.DiffEntry.Type type = 1;</code>
       */
      public boolean hasType() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>required .hadoop.hdfs.fsimage.SnapshotDiffSection.DiffEntry.Type type = 1;</code>
       */
      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry.Type getType() {
        return type_;
      }

      // optional uint64 inodeId = 2;
      public static final int INODEID_FIELD_NUMBER = 2;
      private long inodeId_;
      /**
       * <code>optional uint64 inodeId = 2;</code>
       */
      public boolean hasInodeId() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>optional uint64 inodeId = 2;</code>
       */
      public long getInodeId() {
        return inodeId_;
      }

      // optional uint32 numOfDiff = 3;
      public static final int NUMOFDIFF_FIELD_NUMBER = 3;
      private int numOfDiff_;
      /**
       * <code>optional uint32 numOfDiff = 3;</code>
       */
      public boolean hasNumOfDiff() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      /**
       * <code>optional uint32 numOfDiff = 3;</code>
       */
      public int getNumOfDiff() {
        return numOfDiff_;
      }
17878    
      /** Resets every field to its proto2 default value. */
      private void initFields() {
        type_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry.Type.FILEDIFF;
        inodeId_ = 0L;
        numOfDiff_ = 0;
      }
      // Memoized tri-state: -1 unknown, 0 not initialized, 1 initialized.
      private byte memoizedIsInitialized = -1;
      /** True iff all required fields are set (only {@code type} is required). */
      public final boolean isInitialized() {
        byte isInitialized = memoizedIsInitialized;
        if (isInitialized != -1) return isInitialized == 1;

        if (!hasType()) {
          memoizedIsInitialized = 0;
          return false;
        }
        memoizedIsInitialized = 1;
        return true;
      }
17896    
      /**
       * Serializes this message to the given stream. Only fields whose
       * presence bit is set are written; unknown fields are appended last.
       */
      public void writeTo(com.google.protobuf.CodedOutputStream output)
                          throws java.io.IOException {
        // Ensures the serialized size is memoized before writing.
        getSerializedSize();
        if (((bitField0_ & 0x00000001) == 0x00000001)) {
          output.writeEnum(1, type_.getNumber());
        }
        if (((bitField0_ & 0x00000002) == 0x00000002)) {
          output.writeUInt64(2, inodeId_);
        }
        if (((bitField0_ & 0x00000004) == 0x00000004)) {
          output.writeUInt32(3, numOfDiff_);
        }
        getUnknownFields().writeTo(output);
      }

      // Cached serialized size; -1 means not yet computed.
      private int memoizedSerializedSize = -1;
      /** Returns (and memoizes) the serialized byte size of this message. */
      public int getSerializedSize() {
        int size = memoizedSerializedSize;
        if (size != -1) return size;

        size = 0;
        if (((bitField0_ & 0x00000001) == 0x00000001)) {
          size += com.google.protobuf.CodedOutputStream
            .computeEnumSize(1, type_.getNumber());
        }
        if (((bitField0_ & 0x00000002) == 0x00000002)) {
          size += com.google.protobuf.CodedOutputStream
            .computeUInt64Size(2, inodeId_);
        }
        if (((bitField0_ & 0x00000004) == 0x00000004)) {
          size += com.google.protobuf.CodedOutputStream
            .computeUInt32Size(3, numOfDiff_);
        }
        size += getUnknownFields().getSerializedSize();
        memoizedSerializedSize = size;
        return size;
      }
17934    
      private static final long serialVersionUID = 0L;
      // Java serialization hook; defers to GeneratedMessage's replacement.
      @java.lang.Override
      protected java.lang.Object writeReplace()
          throws java.io.ObjectStreamException {
        return super.writeReplace();
      }
17941    
      // Standard generated parse entry points; every overload delegates to
      // the shared PARSER instance.
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry parseFrom(
          com.google.protobuf.ByteString data)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry parseFrom(
          com.google.protobuf.ByteString data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry parseFrom(byte[] data)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry parseFrom(
          byte[] data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry parseFrom(java.io.InputStream input)
          throws java.io.IOException {
        return PARSER.parseFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry parseFrom(
          java.io.InputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseFrom(input, extensionRegistry);
      }
      // Delimited variants read a leading varint length prefix first.
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry parseDelimitedFrom(java.io.InputStream input)
          throws java.io.IOException {
        return PARSER.parseDelimitedFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry parseDelimitedFrom(
          java.io.InputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseDelimitedFrom(input, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry parseFrom(
          com.google.protobuf.CodedInputStream input)
          throws java.io.IOException {
        return PARSER.parseFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry parseFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseFrom(input, extensionRegistry);
      }
17994    
      /** Creates a new empty builder for this message type. */
      public static Builder newBuilder() { return Builder.create(); }
      public Builder newBuilderForType() { return newBuilder(); }
      /** Creates a builder pre-populated with {@code prototype}'s fields. */
      public static Builder newBuilder(org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry prototype) {
        return newBuilder().mergeFrom(prototype);
      }
      public Builder toBuilder() { return newBuilder(this); }

      // Called by the runtime to create a builder attached to a parent
      // (used for nested-builder change notification).
      @java.lang.Override
      protected Builder newBuilderForType(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        Builder builder = new Builder(parent);
        return builder;
      }
18008          /**
18009           * Protobuf type {@code hadoop.hdfs.fsimage.SnapshotDiffSection.DiffEntry}
18010           */
      public static final class Builder extends
          com.google.protobuf.GeneratedMessage.Builder<Builder>
         implements org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntryOrBuilder {
        // Mutable builder for DiffEntry. Field presence is tracked in
        // bitField0_ (bit 0 = type, bit 1 = inodeId, bit 2 = numOfDiff).
        public static final com.google.protobuf.Descriptors.Descriptor
            getDescriptor() {
          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_DiffEntry_descriptor;
        }

        protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
            internalGetFieldAccessorTable() {
          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_DiffEntry_fieldAccessorTable
              .ensureFieldAccessorsInitialized(
                  org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry.Builder.class);
        }

        // Construct using org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry.newBuilder()
        private Builder() {
          maybeForceBuilderInitialization();
        }

        private Builder(
            com.google.protobuf.GeneratedMessage.BuilderParent parent) {
          super(parent);
          maybeForceBuilderInitialization();
        }
        // No nested message fields here, so there are no sub-builders to
        // force-initialize; the hook is kept for generated-code uniformity.
        private void maybeForceBuilderInitialization() {
          if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          }
        }
        private static Builder create() {
          return new Builder();
        }

        /** Resets all fields to their defaults and clears presence bits. */
        public Builder clear() {
          super.clear();
          type_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry.Type.FILEDIFF;
          bitField0_ = (bitField0_ & ~0x00000001);
          inodeId_ = 0L;
          bitField0_ = (bitField0_ & ~0x00000002);
          numOfDiff_ = 0;
          bitField0_ = (bitField0_ & ~0x00000004);
          return this;
        }

        public Builder clone() {
          return create().mergeFrom(buildPartial());
        }

        public com.google.protobuf.Descriptors.Descriptor
            getDescriptorForType() {
          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_DiffEntry_descriptor;
        }

        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry getDefaultInstanceForType() {
          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry.getDefaultInstance();
        }

        /**
         * Builds the message, failing if required fields are missing.
         *
         * @throws com.google.protobuf.UninitializedMessageException if
         *         {@code type} is unset
         */
        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry build() {
          org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry result = buildPartial();
          if (!result.isInitialized()) {
            throw newUninitializedMessageException(result);
          }
          return result;
        }

        /** Builds without the required-field check; copies presence bits over. */
        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry buildPartial() {
          org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry result = new org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry(this);
          int from_bitField0_ = bitField0_;
          int to_bitField0_ = 0;
          if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
            to_bitField0_ |= 0x00000001;
          }
          result.type_ = type_;
          if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
            to_bitField0_ |= 0x00000002;
          }
          result.inodeId_ = inodeId_;
          if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
            to_bitField0_ |= 0x00000004;
          }
          result.numOfDiff_ = numOfDiff_;
          result.bitField0_ = to_bitField0_;
          onBuilt();
          return result;
        }

        public Builder mergeFrom(com.google.protobuf.Message other) {
          if (other instanceof org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry) {
            return mergeFrom((org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry)other);
          } else {
            // Different message type: fall back to reflective field merge.
            super.mergeFrom(other);
            return this;
          }
        }

        /** Copies each field of {@code other} that is explicitly set. */
        public Builder mergeFrom(org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry other) {
          if (other == org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry.getDefaultInstance()) return this;
          if (other.hasType()) {
            setType(other.getType());
          }
          if (other.hasInodeId()) {
            setInodeId(other.getInodeId());
          }
          if (other.hasNumOfDiff()) {
            setNumOfDiff(other.getNumOfDiff());
          }
          this.mergeUnknownFields(other.getUnknownFields());
          return this;
        }

        /** True iff the required {@code type} field has been set. */
        public final boolean isInitialized() {
          if (!hasType()) {
            
            return false;
          }
          return true;
        }

        /**
         * Parses from a stream and merges the result into this builder.
         * On parse failure, any partially-parsed message is still merged
         * before the exception is rethrown.
         */
        public Builder mergeFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws java.io.IOException {
          org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry parsedMessage = null;
          try {
            parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            parsedMessage = (org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry) e.getUnfinishedMessage();
            throw e;
          } finally {
            if (parsedMessage != null) {
              mergeFrom(parsedMessage);
            }
          }
          return this;
        }
        // Builder-local presence bitmap (same layout as the message's).
        private int bitField0_;

        // required .hadoop.hdfs.fsimage.SnapshotDiffSection.DiffEntry.Type type = 1;
        private org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry.Type type_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry.Type.FILEDIFF;
        /**
         * <code>required .hadoop.hdfs.fsimage.SnapshotDiffSection.DiffEntry.Type type = 1;</code>
         */
        public boolean hasType() {
          return ((bitField0_ & 0x00000001) == 0x00000001);
        }
        /**
         * <code>required .hadoop.hdfs.fsimage.SnapshotDiffSection.DiffEntry.Type type = 1;</code>
         */
        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry.Type getType() {
          return type_;
        }
        /**
         * <code>required .hadoop.hdfs.fsimage.SnapshotDiffSection.DiffEntry.Type type = 1;</code>
         */
        public Builder setType(org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry.Type value) {
          if (value == null) {
            throw new NullPointerException();
          }
          bitField0_ |= 0x00000001;
          type_ = value;
          onChanged();
          return this;
        }
        /**
         * <code>required .hadoop.hdfs.fsimage.SnapshotDiffSection.DiffEntry.Type type = 1;</code>
         */
        public Builder clearType() {
          bitField0_ = (bitField0_ & ~0x00000001);
          type_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry.Type.FILEDIFF;
          onChanged();
          return this;
        }

        // optional uint64 inodeId = 2;
        private long inodeId_ ;
        /**
         * <code>optional uint64 inodeId = 2;</code>
         */
        public boolean hasInodeId() {
          return ((bitField0_ & 0x00000002) == 0x00000002);
        }
        /**
         * <code>optional uint64 inodeId = 2;</code>
         */
        public long getInodeId() {
          return inodeId_;
        }
        /**
         * <code>optional uint64 inodeId = 2;</code>
         */
        public Builder setInodeId(long value) {
          bitField0_ |= 0x00000002;
          inodeId_ = value;
          onChanged();
          return this;
        }
        /**
         * <code>optional uint64 inodeId = 2;</code>
         */
        public Builder clearInodeId() {
          bitField0_ = (bitField0_ & ~0x00000002);
          inodeId_ = 0L;
          onChanged();
          return this;
        }

        // optional uint32 numOfDiff = 3;
        private int numOfDiff_ ;
        /**
         * <code>optional uint32 numOfDiff = 3;</code>
         */
        public boolean hasNumOfDiff() {
          return ((bitField0_ & 0x00000004) == 0x00000004);
        }
        /**
         * <code>optional uint32 numOfDiff = 3;</code>
         */
        public int getNumOfDiff() {
          return numOfDiff_;
        }
        /**
         * <code>optional uint32 numOfDiff = 3;</code>
         */
        public Builder setNumOfDiff(int value) {
          bitField0_ |= 0x00000004;
          numOfDiff_ = value;
          onChanged();
          return this;
        }
        /**
         * <code>optional uint32 numOfDiff = 3;</code>
         */
        public Builder clearNumOfDiff() {
          bitField0_ = (bitField0_ & ~0x00000004);
          numOfDiff_ = 0;
          onChanged();
          return this;
        }

        // @@protoc_insertion_point(builder_scope:hadoop.hdfs.fsimage.SnapshotDiffSection.DiffEntry)
      }
18252    
      // Eagerly create the shared default instance; initFields() assigns
      // every field its proto2 default value.
      static {
        defaultInstance = new DiffEntry(true);
        defaultInstance.initFields();
      }
18257    
18258          // @@protoc_insertion_point(class_scope:hadoop.hdfs.fsimage.SnapshotDiffSection.DiffEntry)
18259        }
18260    
    // SnapshotDiffSection has no scalar fields of its own; nothing to reset.
    private void initFields() {
    }
    // Memoized tri-state: -1 unknown, 0 not initialized, 1 initialized.
    private byte memoizedIsInitialized = -1;
    /** Always true: this message declares no required fields. */
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      memoizedIsInitialized = 1;
      return true;
    }
18271    
    /** Serializes this message; only unknown fields are carried here. */
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      getUnknownFields().writeTo(output);
    }

    // Cached serialized size; -1 means not yet computed.
    private int memoizedSerializedSize = -1;
    /** Returns (and memoizes) the serialized byte size of this message. */
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
18288    
    private static final long serialVersionUID = 0L;
    // Java serialization hook; defers to GeneratedMessage's replacement.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
18295    
    // Standard generated parse entry points; every overload delegates to
    // the shared PARSER instance.
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    // Delimited variants read a leading varint length prefix first.
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
18348    
    /** Creates a new empty builder for this message type. */
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    /** Creates a builder pre-populated with {@code prototype}'s fields. */
    public static Builder newBuilder(org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    // Called by the runtime to create a builder attached to a parent
    // (used for nested-builder change notification).
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
18362        /**
18363         * Protobuf type {@code hadoop.hdfs.fsimage.SnapshotDiffSection}
18364         *
18365         * <pre>
18366         **
18367         * This section records information about snapshot diffs
18368         * NAME: SNAPSHOT_DIFF
18369         * </pre>
18370         */
18371        public static final class Builder extends
18372            com.google.protobuf.GeneratedMessage.Builder<Builder>
18373           implements org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSectionOrBuilder {
18374          public static final com.google.protobuf.Descriptors.Descriptor
18375              getDescriptor() {
18376            return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_descriptor;
18377          }
18378    
18379          protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
18380              internalGetFieldAccessorTable() {
18381            return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_fieldAccessorTable
18382                .ensureFieldAccessorsInitialized(
18383                    org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.Builder.class);
18384          }
18385    
18386          // Construct using org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.newBuilder()
18387          private Builder() {
18388            maybeForceBuilderInitialization();
18389          }
18390    
18391          private Builder(
18392              com.google.protobuf.GeneratedMessage.BuilderParent parent) {
18393            super(parent);
18394            maybeForceBuilderInitialization();
18395          }
18396          private void maybeForceBuilderInitialization() {
18397            if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
18398            }
18399          }
18400          private static Builder create() {
18401            return new Builder();
18402          }
18403    
18404          public Builder clear() {
18405            super.clear();
18406            return this;
18407          }
18408    
18409          public Builder clone() {
18410            return create().mergeFrom(buildPartial());
18411          }
18412    
18413          public com.google.protobuf.Descriptors.Descriptor
18414              getDescriptorForType() {
18415            return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_descriptor;
18416          }
18417    
18418          public org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection getDefaultInstanceForType() {
18419            return org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.getDefaultInstance();
18420          }
18421    
18422          public org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection build() {
18423            org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection result = buildPartial();
18424            if (!result.isInitialized()) {
18425              throw newUninitializedMessageException(result);
18426            }
18427            return result;
18428          }
18429    
18430          public org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection buildPartial() {
18431            org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection result = new org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection(this);
18432            onBuilt();
18433            return result;
18434          }
18435    
18436          public Builder mergeFrom(com.google.protobuf.Message other) {
18437            if (other instanceof org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection) {
18438              return mergeFrom((org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection)other);
18439            } else {
18440              super.mergeFrom(other);
18441              return this;
18442            }
18443          }
18444    
18445          public Builder mergeFrom(org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection other) {
18446            if (other == org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.getDefaultInstance()) return this;
18447            this.mergeUnknownFields(other.getUnknownFields());
18448            return this;
18449          }
18450    
18451          public final boolean isInitialized() {
18452            return true;
18453          }
18454    
18455          public Builder mergeFrom(
18456              com.google.protobuf.CodedInputStream input,
18457              com.google.protobuf.ExtensionRegistryLite extensionRegistry)
18458              throws java.io.IOException {
18459            org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection parsedMessage = null;
18460            try {
18461              parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
18462            } catch (com.google.protobuf.InvalidProtocolBufferException e) {
18463              parsedMessage = (org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection) e.getUnfinishedMessage();
18464              throw e;
18465            } finally {
18466              if (parsedMessage != null) {
18467                mergeFrom(parsedMessage);
18468              }
18469            }
18470            return this;
18471          }
18472    
18473          // @@protoc_insertion_point(builder_scope:hadoop.hdfs.fsimage.SnapshotDiffSection)
18474        }
18475    
    // Eagerly create and initialize the singleton default instance.
    static {
      defaultInstance = new SnapshotDiffSection(true);
      defaultInstance.initFields();
    }
18480    
18481        // @@protoc_insertion_point(class_scope:hadoop.hdfs.fsimage.SnapshotDiffSection)
18482      }
18483    
  /**
   * Accessor contract for {@code hadoop.hdfs.fsimage.StringTableSection},
   * implemented by both the generated message and its builder.
   */
  public interface StringTableSectionOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // optional uint32 numEntry = 1;
    /**
     * <code>optional uint32 numEntry = 1;</code>
     *
     * <pre>
     * repeated Entry
     * </pre>
     */
    boolean hasNumEntry();
    /**
     * <code>optional uint32 numEntry = 1;</code>
     *
     * <pre>
     * repeated Entry
     * </pre>
     */
    int getNumEntry();
  }
18505      /**
18506       * Protobuf type {@code hadoop.hdfs.fsimage.StringTableSection}
18507       *
18508       * <pre>
18509       **
18510       * This section maps string to id
18511       * NAME: STRING_TABLE
18512       * </pre>
18513       */
18514      public static final class StringTableSection extends
18515          com.google.protobuf.GeneratedMessage
18516          implements StringTableSectionOrBuilder {
    // Use StringTableSection.newBuilder() to construct.
    private StringTableSection(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // No-init constructor used only for the static default instance.
    private StringTableSection(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    // Singleton default instance shared by all callers.
    private static final StringTableSection defaultInstance;
    public static StringTableSection getDefaultInstance() {
      return defaultInstance;
    }

    public StringTableSection getDefaultInstanceForType() {
      return defaultInstance;
    }

    // Fields that arrived on the wire but are not defined in the schema.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor, invoked by PARSER.parsePartialFrom.
    private StringTableSection(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      // Declared by the generator; assigned but never read in this constructor.
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          // NOTE(review): the generator emits `default:` before `case 8:`;
          // switch dispatch is order-independent, so behavior is unaffected.
          switch (tag) {
            case 0:
              // Tag 0 marks end of the stream/message.
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 8: {
              // Field 1 (numEntry), wire type 0 (varint).
              bitField0_ |= 0x00000001;
              numEntry_ = input.readUInt32();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Always capture unknown fields, even when parsing fails midway.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_StringTableSection_descriptor;
    }

    // Reflective field-accessor wiring used by the GeneratedMessage runtime.
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_StringTableSection_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.Builder.class);
    }

    // Parser singleton that delegates to the stream-parsing constructor.
    public static com.google.protobuf.Parser<StringTableSection> PARSER =
        new com.google.protobuf.AbstractParser<StringTableSection>() {
      public StringTableSection parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new StringTableSection(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<StringTableSection> getParserForType() {
      return PARSER;
    }
18605    
    /**
     * Accessor contract for {@code hadoop.hdfs.fsimage.StringTableSection.Entry},
     * implemented by both the generated Entry message and its builder.
     */
    public interface EntryOrBuilder
        extends com.google.protobuf.MessageOrBuilder {

      // optional uint32 id = 1;
      /**
       * <code>optional uint32 id = 1;</code>
       */
      boolean hasId();
      /**
       * <code>optional uint32 id = 1;</code>
       */
      int getId();

      // optional string str = 2;
      /**
       * <code>optional string str = 2;</code>
       */
      boolean hasStr();
      /**
       * <code>optional string str = 2;</code>
       */
      java.lang.String getStr();
      /**
       * <code>optional string str = 2;</code>
       */
      com.google.protobuf.ByteString
          getStrBytes();
    }
18634        /**
18635         * Protobuf type {@code hadoop.hdfs.fsimage.StringTableSection.Entry}
18636         */
18637        public static final class Entry extends
18638            com.google.protobuf.GeneratedMessage
18639            implements EntryOrBuilder {
      // Use Entry.newBuilder() to construct.
      private Entry(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
        super(builder);
        this.unknownFields = builder.getUnknownFields();
      }
      // No-init constructor used only for the static default instance.
      private Entry(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

      // Singleton default instance shared by all callers.
      private static final Entry defaultInstance;
      public static Entry getDefaultInstance() {
        return defaultInstance;
      }

      public Entry getDefaultInstanceForType() {
        return defaultInstance;
      }

      // Fields that arrived on the wire but are not defined in the schema.
      private final com.google.protobuf.UnknownFieldSet unknownFields;
      @java.lang.Override
      public final com.google.protobuf.UnknownFieldSet
          getUnknownFields() {
        return this.unknownFields;
      }
      // Wire-format parsing constructor, invoked by PARSER.parsePartialFrom.
      private Entry(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        initFields();
        // Declared by the generator; assigned but never read in this constructor.
        int mutable_bitField0_ = 0;
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
            com.google.protobuf.UnknownFieldSet.newBuilder();
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            // NOTE(review): `default:` precedes the field cases here; switch
            // dispatch is order-independent, so behavior is unaffected.
            switch (tag) {
              case 0:
                // Tag 0 marks end of the stream/message.
                done = true;
                break;
              default: {
                if (!parseUnknownField(input, unknownFields,
                                       extensionRegistry, tag)) {
                  done = true;
                }
                break;
              }
              case 8: {
                // Field 1 (id), wire type 0 (varint).
                bitField0_ |= 0x00000001;
                id_ = input.readUInt32();
                break;
              }
              case 18: {
                // Field 2 (str), wire type 2 (length-delimited); kept as a
                // ByteString and decoded lazily by getStr().
                bitField0_ |= 0x00000002;
                str_ = input.readBytes();
                break;
              }
            }
          }
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(this);
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(
              e.getMessage()).setUnfinishedMessage(this);
        } finally {
          // Always capture unknown fields, even when parsing fails midway.
          this.unknownFields = unknownFields.build();
          makeExtensionsImmutable();
        }
      }
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_StringTableSection_Entry_descriptor;
      }

      // Reflective field-accessor wiring used by the GeneratedMessage runtime.
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_StringTableSection_Entry_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.Entry.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.Entry.Builder.class);
      }

      // Parser singleton that delegates to the stream-parsing constructor.
      public static com.google.protobuf.Parser<Entry> PARSER =
          new com.google.protobuf.AbstractParser<Entry>() {
        public Entry parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          return new Entry(input, extensionRegistry);
        }
      };

      @java.lang.Override
      public com.google.protobuf.Parser<Entry> getParserForType() {
        return PARSER;
      }
18733    
      // Presence bits: 0x1 = id, 0x2 = str.
      private int bitField0_;
      // optional uint32 id = 1;
      public static final int ID_FIELD_NUMBER = 1;
      private int id_;
      /**
       * <code>optional uint32 id = 1;</code>
       */
      public boolean hasId() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>optional uint32 id = 1;</code>
       */
      public int getId() {
        return id_;
      }

      // optional string str = 2;
      public static final int STR_FIELD_NUMBER = 2;
      // Holds either a java.lang.String or a ByteString (lazily decoded).
      private java.lang.Object str_;
      /**
       * <code>optional string str = 2;</code>
       */
      public boolean hasStr() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>optional string str = 2;</code>
       *
       * Decodes the backing ByteString on first access and caches the String
       * form, but only when the bytes are valid UTF-8.
       */
      public java.lang.String getStr() {
        java.lang.Object ref = str_;
        if (ref instanceof java.lang.String) {
          return (java.lang.String) ref;
        } else {
          com.google.protobuf.ByteString bs = 
              (com.google.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            str_ = s;
          }
          return s;
        }
      }
      /**
       * <code>optional string str = 2;</code>
       *
       * Returns the UTF-8 bytes, caching the ByteString form when the field
       * currently holds a String.
       */
      public com.google.protobuf.ByteString
          getStrBytes() {
        java.lang.Object ref = str_;
        if (ref instanceof java.lang.String) {
          com.google.protobuf.ByteString b = 
              com.google.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          str_ = b;
          return b;
        } else {
          return (com.google.protobuf.ByteString) ref;
        }
      }
18793    
      // Resets fields to their proto defaults; called by the parsing constructor.
      private void initFields() {
        id_ = 0;
        str_ = "";
      }
      // Memoized initialization state: -1 unknown, 0 false, 1 true.
      private byte memoizedIsInitialized = -1;
      public final boolean isInitialized() {
        byte isInitialized = memoizedIsInitialized;
        if (isInitialized != -1) return isInitialized == 1;

        // No required fields to verify; always initialized.
        memoizedIsInitialized = 1;
        return true;
      }

      // Writes set fields in field-number order, then any unknown fields.
      public void writeTo(com.google.protobuf.CodedOutputStream output)
                          throws java.io.IOException {
        getSerializedSize();
        if (((bitField0_ & 0x00000001) == 0x00000001)) {
          output.writeUInt32(1, id_);
        }
        if (((bitField0_ & 0x00000002) == 0x00000002)) {
          output.writeBytes(2, getStrBytes());
        }
        getUnknownFields().writeTo(output);
      }

      // Serialized size is computed once and memoized (-1 = not yet computed).
      private int memoizedSerializedSize = -1;
      public int getSerializedSize() {
        int size = memoizedSerializedSize;
        if (size != -1) return size;

        size = 0;
        if (((bitField0_ & 0x00000001) == 0x00000001)) {
          size += com.google.protobuf.CodedOutputStream
            .computeUInt32Size(1, id_);
        }
        if (((bitField0_ & 0x00000002) == 0x00000002)) {
          size += com.google.protobuf.CodedOutputStream
            .computeBytesSize(2, getStrBytes());
        }
        size += getUnknownFields().getSerializedSize();
        memoizedSerializedSize = size;
        return size;
      }

      private static final long serialVersionUID = 0L;
      // Java serialization is routed through the superclass's writeReplace.
      @java.lang.Override
      protected java.lang.Object writeReplace()
          throws java.io.ObjectStreamException {
        return super.writeReplace();
      }
18844    
      // Generated static parse helpers; each simply delegates to PARSER.
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.Entry parseFrom(
          com.google.protobuf.ByteString data)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.Entry parseFrom(
          com.google.protobuf.ByteString data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.Entry parseFrom(byte[] data)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.Entry parseFrom(
          byte[] data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.Entry parseFrom(java.io.InputStream input)
          throws java.io.IOException {
        return PARSER.parseFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.Entry parseFrom(
          java.io.InputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseFrom(input, extensionRegistry);
      }
      // Delimited variants delegate to PARSER.parseDelimitedFrom.
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.Entry parseDelimitedFrom(java.io.InputStream input)
          throws java.io.IOException {
        return PARSER.parseDelimitedFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.Entry parseDelimitedFrom(
          java.io.InputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseDelimitedFrom(input, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.Entry parseFrom(
          com.google.protobuf.CodedInputStream input)
          throws java.io.IOException {
        return PARSER.parseFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.Entry parseFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseFrom(input, extensionRegistry);
      }

      // Builder factory methods; newBuilder(prototype) seeds the builder by
      // merging the given message into a fresh builder.
      public static Builder newBuilder() { return Builder.create(); }
      public Builder newBuilderForType() { return newBuilder(); }
      public static Builder newBuilder(org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.Entry prototype) {
        return newBuilder().mergeFrom(prototype);
      }
      public Builder toBuilder() { return newBuilder(this); }

      @java.lang.Override
      protected Builder newBuilderForType(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        Builder builder = new Builder(parent);
        return builder;
      }
18911          /**
18912           * Protobuf type {@code hadoop.hdfs.fsimage.StringTableSection.Entry}
18913           */
18914          public static final class Builder extends
18915              com.google.protobuf.GeneratedMessage.Builder<Builder>
18916             implements org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.EntryOrBuilder {
        public static final com.google.protobuf.Descriptors.Descriptor
            getDescriptor() {
          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_StringTableSection_Entry_descriptor;
        }

        // Reflective field-accessor wiring used by the GeneratedMessage runtime.
        protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
            internalGetFieldAccessorTable() {
          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_StringTableSection_Entry_fieldAccessorTable
              .ensureFieldAccessorsInitialized(
                  org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.Entry.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.Entry.Builder.class);
        }

        // Construct using org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.Entry.newBuilder()
        private Builder() {
          maybeForceBuilderInitialization();
        }

        private Builder(
            com.google.protobuf.GeneratedMessage.BuilderParent parent) {
          super(parent);
          maybeForceBuilderInitialization();
        }
        private void maybeForceBuilderInitialization() {
          // No nested field builders to force-initialize for this message.
          if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          }
        }
        private static Builder create() {
          return new Builder();
        }
18946    
        // Resets both fields and their presence bits to proto defaults.
        public Builder clear() {
          super.clear();
          id_ = 0;
          bitField0_ = (bitField0_ & ~0x00000001);
          str_ = "";
          bitField0_ = (bitField0_ & ~0x00000002);
          return this;
        }

        public Builder clone() {
          return create().mergeFrom(buildPartial());
        }

        public com.google.protobuf.Descriptors.Descriptor
            getDescriptorForType() {
          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_StringTableSection_Entry_descriptor;
        }

        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.Entry getDefaultInstanceForType() {
          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.Entry.getDefaultInstance();
        }
18968    
        // build() rejects uninitialized messages; buildPartial() does not.
        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.Entry build() {
          org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.Entry result = buildPartial();
          if (!result.isInitialized()) {
            throw newUninitializedMessageException(result);
          }
          return result;
        }

        // Copies field values and presence bits into the result message
        // without checking initialization.
        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.Entry buildPartial() {
          org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.Entry result = new org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.Entry(this);
          int from_bitField0_ = bitField0_;
          int to_bitField0_ = 0;
          if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
            to_bitField0_ |= 0x00000001;
          }
          result.id_ = id_;
          if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
            to_bitField0_ |= 0x00000002;
          }
          result.str_ = str_;
          result.bitField0_ = to_bitField0_;
          onBuilt();
          return result;
        }

        public Builder mergeFrom(com.google.protobuf.Message other) {
          if (other instanceof org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.Entry) {
            return mergeFrom((org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.Entry)other);
          } else {
            super.mergeFrom(other);
            return this;
          }
        }

        // Copies only the fields that are set on `other`.
        public Builder mergeFrom(org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.Entry other) {
          if (other == org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.Entry.getDefaultInstance()) return this;
          if (other.hasId()) {
            setId(other.getId());
          }
          if (other.hasStr()) {
            // Shares other's backing String/ByteString reference (no copy).
            bitField0_ |= 0x00000002;
            str_ = other.str_;
            onChanged();
          }
          this.mergeUnknownFields(other.getUnknownFields());
          return this;
        }
19016    
        public final boolean isInitialized() {
          // Always true: this builder tracks no required fields.
          return true;
        }

        // Parses from a stream; on failure the partially parsed message is
        // still merged into this builder before the exception propagates.
        public Builder mergeFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws java.io.IOException {
          org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.Entry parsedMessage = null;
          try {
            parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            parsedMessage = (org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.Entry) e.getUnfinishedMessage();
            throw e;
          } finally {
            if (parsedMessage != null) {
              mergeFrom(parsedMessage);
            }
          }
          return this;
        }
        // Presence bits for this builder: 0x1 = id set, 0x2 = str set.
        private int bitField0_;

        // optional uint32 id = 1;
        private int id_ ;
        /**
         * <code>optional uint32 id = 1;</code>
         *
         * Returns true if id has been explicitly set on this builder.
         */
        public boolean hasId() {
          return ((bitField0_ & 0x00000001) == 0x00000001);
        }
        /**
         * <code>optional uint32 id = 1;</code>
         */
        public int getId() {
          return id_;
        }
        /**
         * <code>optional uint32 id = 1;</code>
         *
         * Sets id, marks its presence bit, and notifies parent builders.
         */
        public Builder setId(int value) {
          bitField0_ |= 0x00000001;
          id_ = value;
          onChanged();
          return this;
        }
        /**
         * <code>optional uint32 id = 1;</code>
         *
         * Clears the presence bit and resets id to its default (0).
         */
        public Builder clearId() {
          bitField0_ = (bitField0_ & ~0x00000001);
          id_ = 0;
          onChanged();
          return this;
        }
19072    
        // optional string str = 2;
        // Holds either a java.lang.String or a ByteString; conversions are
        // performed lazily and cached in place by the accessors below.
        private java.lang.Object str_ = "";
        /**
         * <code>optional string str = 2;</code>
         */
        public boolean hasStr() {
          return ((bitField0_ & 0x00000002) == 0x00000002);
        }
        /**
         * <code>optional string str = 2;</code>
         *
         * If str_ currently holds a ByteString, decodes it as UTF-8,
         * caches the String back into str_, and returns it.
         */
        public java.lang.String getStr() {
          java.lang.Object ref = str_;
          if (!(ref instanceof java.lang.String)) {
            java.lang.String s = ((com.google.protobuf.ByteString) ref)
                .toStringUtf8();
            str_ = s;
            return s;
          } else {
            return (java.lang.String) ref;
          }
        }
        /**
         * <code>optional string str = 2;</code>
         *
         * Inverse of getStr(): encodes a cached String to UTF-8 bytes and
         * memoizes the ByteString back into str_.
         */
        public com.google.protobuf.ByteString
            getStrBytes() {
          java.lang.Object ref = str_;
          if (ref instanceof String) {
            com.google.protobuf.ByteString b = 
                com.google.protobuf.ByteString.copyFromUtf8(
                    (java.lang.String) ref);
            str_ = b;
            return b;
          } else {
            return (com.google.protobuf.ByteString) ref;
          }
        }
        /**
         * <code>optional string str = 2;</code>
         *
         * Sets str; null is rejected with NullPointerException.
         */
        public Builder setStr(
            java.lang.String value) {
          if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000002;
          str_ = value;
          onChanged();
          return this;
        }
        /**
         * <code>optional string str = 2;</code>
         *
         * Clears the presence bit and restores the default-instance value.
         */
        public Builder clearStr() {
          bitField0_ = (bitField0_ & ~0x00000002);
          str_ = getDefaultInstance().getStr();
          onChanged();
          return this;
        }
        /**
         * <code>optional string str = 2;</code>
         *
         * Sets str from raw bytes without UTF-8 validation; null is rejected.
         */
        public Builder setStrBytes(
            com.google.protobuf.ByteString value) {
          if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000002;
          str_ = value;
          onChanged();
          return this;
        }
19146    
19147            // @@protoc_insertion_point(builder_scope:hadoop.hdfs.fsimage.StringTableSection.Entry)
19148          }
19149    
      // Eagerly create the shared default (empty) Entry instance used by
      // getDefaultInstance() and by merge/clear operations.
      static {
        defaultInstance = new Entry(true);
        defaultInstance.initFields();
      }
19154    
19155          // @@protoc_insertion_point(class_scope:hadoop.hdfs.fsimage.StringTableSection.Entry)
19156        }
19157    
    // Presence bits for StringTableSection: 0x1 = numEntry set.
    private int bitField0_;
    // optional uint32 numEntry = 1;
    public static final int NUMENTRY_FIELD_NUMBER = 1;
    private int numEntry_;
    /**
     * <code>optional uint32 numEntry = 1;</code>
     *
     * <pre>
     * repeated Entry
     * </pre>
     */
    public boolean hasNumEntry() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>optional uint32 numEntry = 1;</code>
     *
     * <pre>
     * repeated Entry
     * </pre>
     */
    public int getNumEntry() {
      return numEntry_;
    }
19182    
    // Resets fields to their proto defaults; called from constructors.
    private void initFields() {
      numEntry_ = 0;
    }
    // Memoized initialization check: -1 = unknown, 1 = initialized.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      // No required fields in StringTableSection, so always initialized.
      memoizedIsInitialized = 1;
      return true;
    }
19194    
    // Serializes set fields in field-number order, then any unknown fields.
    // getSerializedSize() is invoked first for its memoization side effect.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeUInt32(1, numEntry_);
      }
      getUnknownFields().writeTo(output);
    }

    // Cached wire size; -1 means not yet computed.
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt32Size(1, numEntry_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
19218    
    private static final long serialVersionUID = 0L;
    // Java serialization hook; defers to GeneratedMessage's replacement form.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
19225    
    // Static parse entry points; all delegate to the shared PARSER instance.
    // Overloads cover ByteString, byte[], InputStream (whole and delimited),
    // and CodedInputStream sources, each with and without an extension registry.
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
19278    
    // Builder factory methods: fresh builder, builder pre-populated from a
    // prototype message, and the parent-aware variant used for nested builders.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
19292        /**
19293         * Protobuf type {@code hadoop.hdfs.fsimage.StringTableSection}
19294         *
19295         * <pre>
19296         **
19297         * This section maps string to id
19298         * NAME: STRING_TABLE
19299         * </pre>
19300         */
19301        public static final class Builder extends
19302            com.google.protobuf.GeneratedMessage.Builder<Builder>
19303           implements org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSectionOrBuilder {
19304          public static final com.google.protobuf.Descriptors.Descriptor
19305              getDescriptor() {
19306            return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_StringTableSection_descriptor;
19307          }
19308    
19309          protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
19310              internalGetFieldAccessorTable() {
19311            return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_StringTableSection_fieldAccessorTable
19312                .ensureFieldAccessorsInitialized(
19313                    org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.Builder.class);
19314          }
19315    
19316          // Construct using org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.newBuilder()
19317          private Builder() {
19318            maybeForceBuilderInitialization();
19319          }
19320    
19321          private Builder(
19322              com.google.protobuf.GeneratedMessage.BuilderParent parent) {
19323            super(parent);
19324            maybeForceBuilderInitialization();
19325          }
19326          private void maybeForceBuilderInitialization() {
19327            if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
19328            }
19329          }
19330          private static Builder create() {
19331            return new Builder();
19332          }
19333    
19334          public Builder clear() {
19335            super.clear();
19336            numEntry_ = 0;
19337            bitField0_ = (bitField0_ & ~0x00000001);
19338            return this;
19339          }
19340    
19341          public Builder clone() {
19342            return create().mergeFrom(buildPartial());
19343          }
19344    
19345          public com.google.protobuf.Descriptors.Descriptor
19346              getDescriptorForType() {
19347            return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_StringTableSection_descriptor;
19348          }
19349    
19350          public org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection getDefaultInstanceForType() {
19351            return org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.getDefaultInstance();
19352          }
19353    
19354          public org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection build() {
19355            org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection result = buildPartial();
19356            if (!result.isInitialized()) {
19357              throw newUninitializedMessageException(result);
19358            }
19359            return result;
19360          }
19361    
19362          public org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection buildPartial() {
19363            org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection result = new org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection(this);
19364            int from_bitField0_ = bitField0_;
19365            int to_bitField0_ = 0;
19366            if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
19367              to_bitField0_ |= 0x00000001;
19368            }
19369            result.numEntry_ = numEntry_;
19370            result.bitField0_ = to_bitField0_;
19371            onBuilt();
19372            return result;
19373          }
19374    
19375          public Builder mergeFrom(com.google.protobuf.Message other) {
19376            if (other instanceof org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection) {
19377              return mergeFrom((org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection)other);
19378            } else {
19379              super.mergeFrom(other);
19380              return this;
19381            }
19382          }
19383    
19384          public Builder mergeFrom(org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection other) {
19385            if (other == org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.getDefaultInstance()) return this;
19386            if (other.hasNumEntry()) {
19387              setNumEntry(other.getNumEntry());
19388            }
19389            this.mergeUnknownFields(other.getUnknownFields());
19390            return this;
19391          }
19392    
19393          public final boolean isInitialized() {
19394            return true;
19395          }
19396    
19397          public Builder mergeFrom(
19398              com.google.protobuf.CodedInputStream input,
19399              com.google.protobuf.ExtensionRegistryLite extensionRegistry)
19400              throws java.io.IOException {
19401            org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection parsedMessage = null;
19402            try {
19403              parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
19404            } catch (com.google.protobuf.InvalidProtocolBufferException e) {
19405              parsedMessage = (org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection) e.getUnfinishedMessage();
19406              throw e;
19407            } finally {
19408              if (parsedMessage != null) {
19409                mergeFrom(parsedMessage);
19410              }
19411            }
19412            return this;
19413          }
19414          private int bitField0_;
19415    
19416          // optional uint32 numEntry = 1;
19417          private int numEntry_ ;
19418          /**
19419           * <code>optional uint32 numEntry = 1;</code>
19420           *
19421           * <pre>
19422           * repeated Entry
19423           * </pre>
19424           */
19425          public boolean hasNumEntry() {
19426            return ((bitField0_ & 0x00000001) == 0x00000001);
19427          }
19428          /**
19429           * <code>optional uint32 numEntry = 1;</code>
19430           *
19431           * <pre>
19432           * repeated Entry
19433           * </pre>
19434           */
19435          public int getNumEntry() {
19436            return numEntry_;
19437          }
19438          /**
19439           * <code>optional uint32 numEntry = 1;</code>
19440           *
19441           * <pre>
19442           * repeated Entry
19443           * </pre>
19444           */
19445          public Builder setNumEntry(int value) {
19446            bitField0_ |= 0x00000001;
19447            numEntry_ = value;
19448            onChanged();
19449            return this;
19450          }
19451          /**
19452           * <code>optional uint32 numEntry = 1;</code>
19453           *
19454           * <pre>
19455           * repeated Entry
19456           * </pre>
19457           */
19458          public Builder clearNumEntry() {
19459            bitField0_ = (bitField0_ & ~0x00000001);
19460            numEntry_ = 0;
19461            onChanged();
19462            return this;
19463          }
19464    
19465          // @@protoc_insertion_point(builder_scope:hadoop.hdfs.fsimage.StringTableSection)
19466        }
19467    
    // Eagerly create the shared default (empty) StringTableSection instance.
    static {
      defaultInstance = new StringTableSection(true);
      defaultInstance.initFields();
    }
19472    
19473        // @@protoc_insertion_point(class_scope:hadoop.hdfs.fsimage.StringTableSection)
19474      }
19475    
  /**
   * Read-only accessor interface shared by SecretManagerSection and its
   * Builder: presence checks (has*) and getters for the four scalar fields.
   */
  public interface SecretManagerSectionOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // optional uint32 currentId = 1;
    /**
     * <code>optional uint32 currentId = 1;</code>
     */
    boolean hasCurrentId();
    /**
     * <code>optional uint32 currentId = 1;</code>
     */
    int getCurrentId();

    // optional uint32 tokenSequenceNumber = 2;
    /**
     * <code>optional uint32 tokenSequenceNumber = 2;</code>
     */
    boolean hasTokenSequenceNumber();
    /**
     * <code>optional uint32 tokenSequenceNumber = 2;</code>
     */
    int getTokenSequenceNumber();

    // optional uint32 numKeys = 3;
    /**
     * <code>optional uint32 numKeys = 3;</code>
     */
    boolean hasNumKeys();
    /**
     * <code>optional uint32 numKeys = 3;</code>
     */
    int getNumKeys();

    // optional uint32 numTokens = 4;
    /**
     * <code>optional uint32 numTokens = 4;</code>
     *
     * <pre>
     * repeated DelegationKey keys
     * repeated PersistToken tokens
     * </pre>
     */
    boolean hasNumTokens();
    /**
     * <code>optional uint32 numTokens = 4;</code>
     *
     * <pre>
     * repeated DelegationKey keys
     * repeated PersistToken tokens
     * </pre>
     */
    int getNumTokens();
  }
19529      /**
19530       * Protobuf type {@code hadoop.hdfs.fsimage.SecretManagerSection}
19531       */
19532      public static final class SecretManagerSection extends
19533          com.google.protobuf.GeneratedMessage
19534          implements SecretManagerSectionOrBuilder {
    // Use SecretManagerSection.newBuilder() to construct.
    private SecretManagerSection(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Constructor for the shared default instance (no fields populated).
    private SecretManagerSection(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    // Singleton empty message, created in the class's static initializer.
    private static final SecretManagerSection defaultInstance;
    public static SecretManagerSection getDefaultInstance() {
      return defaultInstance;
    }

    public SecretManagerSection getDefaultInstanceForType() {
      return defaultInstance;
    }

    // Fields seen on the wire that this schema version does not recognize.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor: reads tags in a loop and dispatches on
    // the tag value (field number << 3 | wire type). Tag 0 means end of input;
    // unrecognized tags are preserved via parseUnknownField. Note the 'default'
    // label precedes the 'case' labels in source order, which does not affect
    // Java switch dispatch semantics.
    private SecretManagerSection(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 8: {  // field 1, varint: currentId
              bitField0_ |= 0x00000001;
              currentId_ = input.readUInt32();
              break;
            }
            case 16: {  // field 2, varint: tokenSequenceNumber
              bitField0_ |= 0x00000002;
              tokenSequenceNumber_ = input.readUInt32();
              break;
            }
            case 24: {  // field 3, varint: numKeys
              bitField0_ |= 0x00000004;
              numKeys_ = input.readUInt32();
              break;
            }
            case 32: {  // field 4, varint: numTokens
              bitField0_ |= 0x00000008;
              numTokens_ = input.readUInt32();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Always capture whatever unknown fields were read, even on failure.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SecretManagerSection_descriptor;
    }

    // Wires the reflection-based field accessors to the message/builder classes.
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SecretManagerSection_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.Builder.class);
    }

    // Shared parser delegating to the wire-format parsing constructor.
    // NOTE(review): generated as a mutable public static field (protobuf 2.5
    // style); later protoc versions mark this final/deprecated. Do not reassign.
    public static com.google.protobuf.Parser<SecretManagerSection> PARSER =
        new com.google.protobuf.AbstractParser<SecretManagerSection>() {
      public SecretManagerSection parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new SecretManagerSection(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<SecretManagerSection> getParserForType() {
      return PARSER;
    }
19638    
    /**
     * Read-only accessor interface shared by DelegationKey and its Builder:
     * presence checks and getters for id, expiryDate, and key.
     */
    public interface DelegationKeyOrBuilder
        extends com.google.protobuf.MessageOrBuilder {

      // optional uint32 id = 1;
      /**
       * <code>optional uint32 id = 1;</code>
       */
      boolean hasId();
      /**
       * <code>optional uint32 id = 1;</code>
       */
      int getId();

      // optional uint64 expiryDate = 2;
      /**
       * <code>optional uint64 expiryDate = 2;</code>
       */
      boolean hasExpiryDate();
      /**
       * <code>optional uint64 expiryDate = 2;</code>
       */
      long getExpiryDate();

      // optional bytes key = 3;
      /**
       * <code>optional bytes key = 3;</code>
       */
      boolean hasKey();
      /**
       * <code>optional bytes key = 3;</code>
       */
      com.google.protobuf.ByteString getKey();
    }
19672        /**
19673         * Protobuf type {@code hadoop.hdfs.fsimage.SecretManagerSection.DelegationKey}
19674         */
19675        public static final class DelegationKey extends
19676            com.google.protobuf.GeneratedMessage
19677            implements DelegationKeyOrBuilder {
      // Use DelegationKey.newBuilder() to construct.
      private DelegationKey(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
        super(builder);
        this.unknownFields = builder.getUnknownFields();
      }
      // Constructor for the shared default instance (no fields populated).
      private DelegationKey(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

      // Singleton empty message, created in the class's static initializer.
      private static final DelegationKey defaultInstance;
      public static DelegationKey getDefaultInstance() {
        return defaultInstance;
      }

      public DelegationKey getDefaultInstanceForType() {
        return defaultInstance;
      }

      // Fields seen on the wire that this schema version does not recognize.
      private final com.google.protobuf.UnknownFieldSet unknownFields;
      @java.lang.Override
      public final com.google.protobuf.UnknownFieldSet
          getUnknownFields() {
        return this.unknownFields;
      }
      // Wire-format parsing constructor: tag-dispatch loop. Tag 0 = end of
      // input; unknown tags are preserved. See SecretManagerSection's parsing
      // constructor for the same pattern.
      private DelegationKey(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        initFields();
        int mutable_bitField0_ = 0;
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
            com.google.protobuf.UnknownFieldSet.newBuilder();
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              default: {
                if (!parseUnknownField(input, unknownFields,
                                       extensionRegistry, tag)) {
                  done = true;
                }
                break;
              }
              case 8: {  // field 1, varint: id
                bitField0_ |= 0x00000001;
                id_ = input.readUInt32();
                break;
              }
              case 16: {  // field 2, varint: expiryDate
                bitField0_ |= 0x00000002;
                expiryDate_ = input.readUInt64();
                break;
              }
              case 26: {  // field 3, length-delimited: key bytes
                bitField0_ |= 0x00000004;
                key_ = input.readBytes();
                break;
              }
            }
          }
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(this);
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(
              e.getMessage()).setUnfinishedMessage(this);
        } finally {
          // Always capture whatever unknown fields were read, even on failure.
          this.unknownFields = unknownFields.build();
          makeExtensionsImmutable();
        }
      }
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SecretManagerSection_DelegationKey_descriptor;
      }

      // Wires the reflection-based field accessors to the message/builder classes.
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SecretManagerSection_DelegationKey_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.DelegationKey.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.DelegationKey.Builder.class);
      }

      // Shared parser delegating to the wire-format parsing constructor.
      // NOTE(review): generated as a mutable public static field (protobuf 2.5
      // style); do not reassign.
      public static com.google.protobuf.Parser<DelegationKey> PARSER =
          new com.google.protobuf.AbstractParser<DelegationKey>() {
        public DelegationKey parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          return new DelegationKey(input, extensionRegistry);
        }
      };

      @java.lang.Override
      public com.google.protobuf.Parser<DelegationKey> getParserForType() {
        return PARSER;
      }
19776    
19777          private int bitField0_;
19778          // optional uint32 id = 1;
19779          public static final int ID_FIELD_NUMBER = 1;
19780          private int id_;
19781          /**
19782           * <code>optional uint32 id = 1;</code>
19783           */
19784          public boolean hasId() {
19785            return ((bitField0_ & 0x00000001) == 0x00000001);
19786          }
19787          /**
19788           * <code>optional uint32 id = 1;</code>
19789           */
19790          public int getId() {
19791            return id_;
19792          }
19793    
19794          // optional uint64 expiryDate = 2;
19795          public static final int EXPIRYDATE_FIELD_NUMBER = 2;
19796          private long expiryDate_;
19797          /**
19798           * <code>optional uint64 expiryDate = 2;</code>
19799           */
19800          public boolean hasExpiryDate() {
19801            return ((bitField0_ & 0x00000002) == 0x00000002);
19802          }
19803          /**
19804           * <code>optional uint64 expiryDate = 2;</code>
19805           */
19806          public long getExpiryDate() {
19807            return expiryDate_;
19808          }
19809    
19810          // optional bytes key = 3;
19811          public static final int KEY_FIELD_NUMBER = 3;
19812          private com.google.protobuf.ByteString key_;
19813          /**
19814           * <code>optional bytes key = 3;</code>
19815           */
19816          public boolean hasKey() {
19817            return ((bitField0_ & 0x00000004) == 0x00000004);
19818          }
19819          /**
19820           * <code>optional bytes key = 3;</code>
19821           */
19822          public com.google.protobuf.ByteString getKey() {
19823            return key_;
19824          }
19825    
      // Resets all fields to their proto defaults (used e.g. by the
      // defaultInstance static initializer below).
      private void initFields() {
        id_ = 0;
        expiryDate_ = 0L;
        key_ = com.google.protobuf.ByteString.EMPTY;
      }
      // Cached isInitialized() result: -1 = not yet computed, 1 = initialized.
      private byte memoizedIsInitialized = -1;
      public final boolean isInitialized() {
        byte isInitialized = memoizedIsInitialized;
        if (isInitialized != -1) return isInitialized == 1;

        // Every field of DelegationKey is optional, so it is always initialized.
        memoizedIsInitialized = 1;
        return true;
      }
19839    
19840          public void writeTo(com.google.protobuf.CodedOutputStream output)
19841                              throws java.io.IOException {
19842            getSerializedSize();
19843            if (((bitField0_ & 0x00000001) == 0x00000001)) {
19844              output.writeUInt32(1, id_);
19845            }
19846            if (((bitField0_ & 0x00000002) == 0x00000002)) {
19847              output.writeUInt64(2, expiryDate_);
19848            }
19849            if (((bitField0_ & 0x00000004) == 0x00000004)) {
19850              output.writeBytes(3, key_);
19851            }
19852            getUnknownFields().writeTo(output);
19853          }
19854    
19855          private int memoizedSerializedSize = -1;
19856          public int getSerializedSize() {
19857            int size = memoizedSerializedSize;
19858            if (size != -1) return size;
19859    
19860            size = 0;
19861            if (((bitField0_ & 0x00000001) == 0x00000001)) {
19862              size += com.google.protobuf.CodedOutputStream
19863                .computeUInt32Size(1, id_);
19864            }
19865            if (((bitField0_ & 0x00000002) == 0x00000002)) {
19866              size += com.google.protobuf.CodedOutputStream
19867                .computeUInt64Size(2, expiryDate_);
19868            }
19869            if (((bitField0_ & 0x00000004) == 0x00000004)) {
19870              size += com.google.protobuf.CodedOutputStream
19871                .computeBytesSize(3, key_);
19872            }
19873            size += getUnknownFields().getSerializedSize();
19874            memoizedSerializedSize = size;
19875            return size;
19876          }
19877    
      private static final long serialVersionUID = 0L;
      // Route Java serialization through the GeneratedMessage superclass.
      @java.lang.Override
      protected java.lang.Object writeReplace()
          throws java.io.ObjectStreamException {
        return super.writeReplace();
      }
19884    
      // parseFrom/parseDelimitedFrom overloads: all delegate to PARSER and
      // accept bytes, an InputStream, or a CodedInputStream, each with or
      // without an extension registry.
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.DelegationKey parseFrom(
          com.google.protobuf.ByteString data)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.DelegationKey parseFrom(
          com.google.protobuf.ByteString data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.DelegationKey parseFrom(byte[] data)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.DelegationKey parseFrom(
          byte[] data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.DelegationKey parseFrom(java.io.InputStream input)
          throws java.io.IOException {
        return PARSER.parseFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.DelegationKey parseFrom(
          java.io.InputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseFrom(input, extensionRegistry);
      }
      // Delimited variants read a varint length prefix before the message body.
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.DelegationKey parseDelimitedFrom(java.io.InputStream input)
          throws java.io.IOException {
        return PARSER.parseDelimitedFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.DelegationKey parseDelimitedFrom(
          java.io.InputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseDelimitedFrom(input, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.DelegationKey parseFrom(
          com.google.protobuf.CodedInputStream input)
          throws java.io.IOException {
        return PARSER.parseFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.DelegationKey parseFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseFrom(input, extensionRegistry);
      }
19937    
      // Builder factory methods.
      public static Builder newBuilder() { return Builder.create(); }
      public Builder newBuilderForType() { return newBuilder(); }
      // Returns a fresh builder pre-populated with the fields of {@code prototype}.
      public static Builder newBuilder(org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.DelegationKey prototype) {
        return newBuilder().mergeFrom(prototype);
      }
      public Builder toBuilder() { return newBuilder(this); }

      @java.lang.Override
      protected Builder newBuilderForType(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        Builder builder = new Builder(parent);
        return builder;
      }
19951          /**
19952           * Protobuf type {@code hadoop.hdfs.fsimage.SecretManagerSection.DelegationKey}
19953           */
      public static final class Builder extends
          com.google.protobuf.GeneratedMessage.Builder<Builder>
         implements org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.DelegationKeyOrBuilder {
        public static final com.google.protobuf.Descriptors.Descriptor
            getDescriptor() {
          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SecretManagerSection_DelegationKey_descriptor;
        }

        protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
            internalGetFieldAccessorTable() {
          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SecretManagerSection_DelegationKey_fieldAccessorTable
              .ensureFieldAccessorsInitialized(
                  org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.DelegationKey.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.DelegationKey.Builder.class);
        }

        // Construct using org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.DelegationKey.newBuilder()
        private Builder() {
          maybeForceBuilderInitialization();
        }

        private Builder(
            com.google.protobuf.GeneratedMessage.BuilderParent parent) {
          super(parent);
          maybeForceBuilderInitialization();
        }
        // No message-typed fields, so nothing to eagerly initialize here.
        private void maybeForceBuilderInitialization() {
          if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          }
        }
        private static Builder create() {
          return new Builder();
        }

        // Resets every field and its presence bit to the proto default.
        public Builder clear() {
          super.clear();
          id_ = 0;
          bitField0_ = (bitField0_ & ~0x00000001);
          expiryDate_ = 0L;
          bitField0_ = (bitField0_ & ~0x00000002);
          key_ = com.google.protobuf.ByteString.EMPTY;
          bitField0_ = (bitField0_ & ~0x00000004);
          return this;
        }

        public Builder clone() {
          return create().mergeFrom(buildPartial());
        }

        public com.google.protobuf.Descriptors.Descriptor
            getDescriptorForType() {
          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SecretManagerSection_DelegationKey_descriptor;
        }

        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.DelegationKey getDefaultInstanceForType() {
          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.DelegationKey.getDefaultInstance();
        }

        // Like buildPartial(), but throws if the result is not initialized.
        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.DelegationKey build() {
          org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.DelegationKey result = buildPartial();
          if (!result.isInitialized()) {
            throw newUninitializedMessageException(result);
          }
          return result;
        }

        // Copies the builder's field values and presence bits into a new message.
        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.DelegationKey buildPartial() {
          org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.DelegationKey result = new org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.DelegationKey(this);
          int from_bitField0_ = bitField0_;
          int to_bitField0_ = 0;
          if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
            to_bitField0_ |= 0x00000001;
          }
          result.id_ = id_;
          if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
            to_bitField0_ |= 0x00000002;
          }
          result.expiryDate_ = expiryDate_;
          if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
            to_bitField0_ |= 0x00000004;
          }
          result.key_ = key_;
          result.bitField0_ = to_bitField0_;
          onBuilt();
          return result;
        }

        public Builder mergeFrom(com.google.protobuf.Message other) {
          if (other instanceof org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.DelegationKey) {
            return mergeFrom((org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.DelegationKey)other);
          } else {
            super.mergeFrom(other);
            return this;
          }
        }

        // Field-by-field merge: each field set in {@code other} overwrites ours.
        public Builder mergeFrom(org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.DelegationKey other) {
          if (other == org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.DelegationKey.getDefaultInstance()) return this;
          if (other.hasId()) {
            setId(other.getId());
          }
          if (other.hasExpiryDate()) {
            setExpiryDate(other.getExpiryDate());
          }
          if (other.hasKey()) {
            setKey(other.getKey());
          }
          this.mergeUnknownFields(other.getUnknownFields());
          return this;
        }

        public final boolean isInitialized() {
          return true;
        }

        // Parses from the stream; on failure, still merges whatever was parsed
        // before rethrowing, so the builder reflects the partial message.
        public Builder mergeFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws java.io.IOException {
          org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.DelegationKey parsedMessage = null;
          try {
            parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            parsedMessage = (org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.DelegationKey) e.getUnfinishedMessage();
            throw e;
          } finally {
            if (parsedMessage != null) {
              mergeFrom(parsedMessage);
            }
          }
          return this;
        }
        // Presence bitmap mirroring the message's bitField0_.
        private int bitField0_;

        // optional uint32 id = 1;
        private int id_ ;
        /**
         * <code>optional uint32 id = 1;</code>
         */
        public boolean hasId() {
          return ((bitField0_ & 0x00000001) == 0x00000001);
        }
        /**
         * <code>optional uint32 id = 1;</code>
         */
        public int getId() {
          return id_;
        }
        /**
         * <code>optional uint32 id = 1;</code>
         */
        public Builder setId(int value) {
          bitField0_ |= 0x00000001;
          id_ = value;
          onChanged();
          return this;
        }
        /**
         * <code>optional uint32 id = 1;</code>
         */
        public Builder clearId() {
          bitField0_ = (bitField0_ & ~0x00000001);
          id_ = 0;
          onChanged();
          return this;
        }

        // optional uint64 expiryDate = 2;
        private long expiryDate_ ;
        /**
         * <code>optional uint64 expiryDate = 2;</code>
         */
        public boolean hasExpiryDate() {
          return ((bitField0_ & 0x00000002) == 0x00000002);
        }
        /**
         * <code>optional uint64 expiryDate = 2;</code>
         */
        public long getExpiryDate() {
          return expiryDate_;
        }
        /**
         * <code>optional uint64 expiryDate = 2;</code>
         */
        public Builder setExpiryDate(long value) {
          bitField0_ |= 0x00000002;
          expiryDate_ = value;
          onChanged();
          return this;
        }
        /**
         * <code>optional uint64 expiryDate = 2;</code>
         */
        public Builder clearExpiryDate() {
          bitField0_ = (bitField0_ & ~0x00000002);
          expiryDate_ = 0L;
          onChanged();
          return this;
        }

        // optional bytes key = 3;
        private com.google.protobuf.ByteString key_ = com.google.protobuf.ByteString.EMPTY;
        /**
         * <code>optional bytes key = 3;</code>
         */
        public boolean hasKey() {
          return ((bitField0_ & 0x00000004) == 0x00000004);
        }
        /**
         * <code>optional bytes key = 3;</code>
         */
        public com.google.protobuf.ByteString getKey() {
          return key_;
        }
        /**
         * <code>optional bytes key = 3;</code>
         */
        public Builder setKey(com.google.protobuf.ByteString value) {
          if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000004;
          key_ = value;
          onChanged();
          return this;
        }
        /**
         * <code>optional bytes key = 3;</code>
         */
        public Builder clearKey() {
          bitField0_ = (bitField0_ & ~0x00000004);
          key_ = getDefaultInstance().getKey();
          onChanged();
          return this;
        }

        // @@protoc_insertion_point(builder_scope:hadoop.hdfs.fsimage.SecretManagerSection.DelegationKey)
      }
20191    
      // Eagerly create the shared default instance with all fields at their
      // proto default values.
      static {
        defaultInstance = new DelegationKey(true);
        defaultInstance.initFields();
      }
20196    
20197          // @@protoc_insertion_point(class_scope:hadoop.hdfs.fsimage.SecretManagerSection.DelegationKey)
20198        }
20199    
    /**
     * Read-only accessor interface for {@code SecretManagerSection.PersistToken}:
     * one hasX()/getX() pair per proto field, plus getXBytes() for string fields.
     */
    public interface PersistTokenOrBuilder
        extends com.google.protobuf.MessageOrBuilder {

      // optional uint32 version = 1;
      /**
       * <code>optional uint32 version = 1;</code>
       */
      boolean hasVersion();
      /**
       * <code>optional uint32 version = 1;</code>
       */
      int getVersion();

      // optional string owner = 2;
      /**
       * <code>optional string owner = 2;</code>
       */
      boolean hasOwner();
      /**
       * <code>optional string owner = 2;</code>
       */
      java.lang.String getOwner();
      /**
       * <code>optional string owner = 2;</code>
       */
      com.google.protobuf.ByteString
          getOwnerBytes();

      // optional string renewer = 3;
      /**
       * <code>optional string renewer = 3;</code>
       */
      boolean hasRenewer();
      /**
       * <code>optional string renewer = 3;</code>
       */
      java.lang.String getRenewer();
      /**
       * <code>optional string renewer = 3;</code>
       */
      com.google.protobuf.ByteString
          getRenewerBytes();

      // optional string realUser = 4;
      /**
       * <code>optional string realUser = 4;</code>
       */
      boolean hasRealUser();
      /**
       * <code>optional string realUser = 4;</code>
       */
      java.lang.String getRealUser();
      /**
       * <code>optional string realUser = 4;</code>
       */
      com.google.protobuf.ByteString
          getRealUserBytes();

      // optional uint64 issueDate = 5;
      /**
       * <code>optional uint64 issueDate = 5;</code>
       */
      boolean hasIssueDate();
      /**
       * <code>optional uint64 issueDate = 5;</code>
       */
      long getIssueDate();

      // optional uint64 maxDate = 6;
      /**
       * <code>optional uint64 maxDate = 6;</code>
       */
      boolean hasMaxDate();
      /**
       * <code>optional uint64 maxDate = 6;</code>
       */
      long getMaxDate();

      // optional uint32 sequenceNumber = 7;
      /**
       * <code>optional uint32 sequenceNumber = 7;</code>
       */
      boolean hasSequenceNumber();
      /**
       * <code>optional uint32 sequenceNumber = 7;</code>
       */
      int getSequenceNumber();

      // optional uint32 masterKeyId = 8;
      /**
       * <code>optional uint32 masterKeyId = 8;</code>
       */
      boolean hasMasterKeyId();
      /**
       * <code>optional uint32 masterKeyId = 8;</code>
       */
      int getMasterKeyId();

      // optional uint64 expiryDate = 9;
      /**
       * <code>optional uint64 expiryDate = 9;</code>
       */
      boolean hasExpiryDate();
      /**
       * <code>optional uint64 expiryDate = 9;</code>
       */
      long getExpiryDate();
    }
20308        /**
20309         * Protobuf type {@code hadoop.hdfs.fsimage.SecretManagerSection.PersistToken}
20310         */
20311        public static final class PersistToken extends
20312            com.google.protobuf.GeneratedMessage
20313            implements PersistTokenOrBuilder {
      // Use PersistToken.newBuilder() to construct.
      private PersistToken(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
        super(builder);
        this.unknownFields = builder.getUnknownFields();
      }
      // No-init constructor: used only for the shared default instance.
      private PersistToken(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
20320    
      // Shared immutable default instance (assigned in the class's static
      // initializer, outside this view — mirrors DelegationKey above).
      private static final PersistToken defaultInstance;
      public static PersistToken getDefaultInstance() {
        return defaultInstance;
      }

      public PersistToken getDefaultInstanceForType() {
        return defaultInstance;
      }
20329    
      // Unknown fields preserved from parsing so unrecognized data survives a
      // parse/serialize round trip.
      private final com.google.protobuf.UnknownFieldSet unknownFields;
      @java.lang.Override
      public final com.google.protobuf.UnknownFieldSet
          getUnknownFields() {
        return this.unknownFields;
      }
      /**
       * Wire-format parsing constructor: reads tag/value pairs from
       * {@code input} until end of message, recording presence bits as fields
       * are seen. Unrecognized tags are preserved in {@code unknownFields}.
       * Each tag is (field_number &lt;&lt; 3) | wire_type.
       */
      private PersistToken(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        initFields();
        // Declared by the generator but unused: this message has no repeated fields.
        int mutable_bitField0_ = 0;
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
            com.google.protobuf.UnknownFieldSet.newBuilder();
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              default: {
                if (!parseUnknownField(input, unknownFields,
                                       extensionRegistry, tag)) {
                  done = true;
                }
                break;
              }
              case 8: {   // field 1 (version), varint
                bitField0_ |= 0x00000001;
                version_ = input.readUInt32();
                break;
              }
              case 18: {  // field 2 (owner), length-delimited; kept as raw bytes
                bitField0_ |= 0x00000002;
                owner_ = input.readBytes();
                break;
              }
              case 26: {  // field 3 (renewer), length-delimited
                bitField0_ |= 0x00000004;
                renewer_ = input.readBytes();
                break;
              }
              case 34: {  // field 4 (realUser), length-delimited
                bitField0_ |= 0x00000008;
                realUser_ = input.readBytes();
                break;
              }
              case 40: {  // field 5 (issueDate), varint
                bitField0_ |= 0x00000010;
                issueDate_ = input.readUInt64();
                break;
              }
              case 48: {  // field 6 (maxDate), varint
                bitField0_ |= 0x00000020;
                maxDate_ = input.readUInt64();
                break;
              }
              case 56: {  // field 7 (sequenceNumber), varint
                bitField0_ |= 0x00000040;
                sequenceNumber_ = input.readUInt32();
                break;
              }
              case 64: {  // field 8 (masterKeyId), varint
                bitField0_ |= 0x00000080;
                masterKeyId_ = input.readUInt32();
                break;
              }
              case 72: {  // field 9 (expiryDate), varint
                bitField0_ |= 0x00000100;
                expiryDate_ = input.readUInt64();
                break;
              }
            }
          }
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          // Attach this partially-populated message before rethrowing.
          throw e.setUnfinishedMessage(this);
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(
              e.getMessage()).setUnfinishedMessage(this);
        } finally {
          this.unknownFields = unknownFields.build();
          makeExtensionsImmutable();
        }
      }
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SecretManagerSection_PersistToken_descriptor;
      }

      // Binds the descriptor to this class and its Builder for reflective access.
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SecretManagerSection_PersistToken_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.PersistToken.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.PersistToken.Builder.class);
      }
20427    
      // Stateless parser singleton; delegates to the stream-parsing constructor,
      // so a failed parse still carries the unfinished message.
      public static com.google.protobuf.Parser<PersistToken> PARSER =
          new com.google.protobuf.AbstractParser<PersistToken>() {
        public PersistToken parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          return new PersistToken(input, extensionRegistry);
        }
      };

      @java.lang.Override
      public com.google.protobuf.Parser<PersistToken> getParserForType() {
        return PARSER;
      }
20442    
20443          private int bitField0_;
20444          // optional uint32 version = 1;
20445          public static final int VERSION_FIELD_NUMBER = 1;
20446          private int version_;
20447          /**
20448           * <code>optional uint32 version = 1;</code>
20449           */
20450          public boolean hasVersion() {
20451            return ((bitField0_ & 0x00000001) == 0x00000001);
20452          }
20453          /**
20454           * <code>optional uint32 version = 1;</code>
20455           */
20456          public int getVersion() {
20457            return version_;
20458          }
20459    
      // optional string owner = 2;
      public static final int OWNER_FIELD_NUMBER = 2;
      // Holds either a java.lang.String or a ByteString: the raw bytes read off
      // the wire are decoded to a String lazily, on first access.
      private java.lang.Object owner_;
      /**
       * <code>optional string owner = 2;</code>
       */
      public boolean hasOwner() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>optional string owner = 2;</code>
       */
      public java.lang.String getOwner() {
        java.lang.Object ref = owner_;
        if (ref instanceof java.lang.String) {
          return (java.lang.String) ref;
        } else {
          com.google.protobuf.ByteString bs = 
              (com.google.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          // Cache the decoded String only if the bytes were valid UTF-8;
          // otherwise keep the original bytes so getOwnerBytes() is lossless.
          if (bs.isValidUtf8()) {
            owner_ = s;
          }
          return s;
        }
      }
      /**
       * <code>optional string owner = 2;</code>
       */
      public com.google.protobuf.ByteString
          getOwnerBytes() {
        java.lang.Object ref = owner_;
        if (ref instanceof java.lang.String) {
          // Re-encode (and cache) the UTF-8 bytes of a previously decoded String.
          com.google.protobuf.ByteString b = 
              com.google.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          owner_ = b;
          return b;
        } else {
          return (com.google.protobuf.ByteString) ref;
        }
      }
20502    
20503          // optional string renewer = 3;
20504          public static final int RENEWER_FIELD_NUMBER = 3;
20505          private java.lang.Object renewer_;
20506          /**
20507           * <code>optional string renewer = 3;</code>
20508           */
20509          public boolean hasRenewer() {
20510            return ((bitField0_ & 0x00000004) == 0x00000004);
20511          }
20512          /**
20513           * <code>optional string renewer = 3;</code>
20514           */
20515          public java.lang.String getRenewer() {
20516            java.lang.Object ref = renewer_;
20517            if (ref instanceof java.lang.String) {
20518              return (java.lang.String) ref;
20519            } else {
20520              com.google.protobuf.ByteString bs = 
20521                  (com.google.protobuf.ByteString) ref;
20522              java.lang.String s = bs.toStringUtf8();
20523              if (bs.isValidUtf8()) {
20524                renewer_ = s;
20525              }
20526              return s;
20527            }
20528          }
20529          /**
20530           * <code>optional string renewer = 3;</code>
20531           */
20532          public com.google.protobuf.ByteString
20533              getRenewerBytes() {
20534            java.lang.Object ref = renewer_;
20535            if (ref instanceof java.lang.String) {
20536              com.google.protobuf.ByteString b = 
20537                  com.google.protobuf.ByteString.copyFromUtf8(
20538                      (java.lang.String) ref);
20539              renewer_ = b;
20540              return b;
20541            } else {
20542              return (com.google.protobuf.ByteString) ref;
20543            }
20544          }
20545    
20546          // optional string realUser = 4;
20547          public static final int REALUSER_FIELD_NUMBER = 4;
20548          private java.lang.Object realUser_;
20549          /**
20550           * <code>optional string realUser = 4;</code>
20551           */
20552          public boolean hasRealUser() {
20553            return ((bitField0_ & 0x00000008) == 0x00000008);
20554          }
20555          /**
20556           * <code>optional string realUser = 4;</code>
20557           */
20558          public java.lang.String getRealUser() {
20559            java.lang.Object ref = realUser_;
20560            if (ref instanceof java.lang.String) {
20561              return (java.lang.String) ref;
20562            } else {
20563              com.google.protobuf.ByteString bs = 
20564                  (com.google.protobuf.ByteString) ref;
20565              java.lang.String s = bs.toStringUtf8();
20566              if (bs.isValidUtf8()) {
20567                realUser_ = s;
20568              }
20569              return s;
20570            }
20571          }
20572          /**
20573           * <code>optional string realUser = 4;</code>
20574           */
20575          public com.google.protobuf.ByteString
20576              getRealUserBytes() {
20577            java.lang.Object ref = realUser_;
20578            if (ref instanceof java.lang.String) {
20579              com.google.protobuf.ByteString b = 
20580                  com.google.protobuf.ByteString.copyFromUtf8(
20581                      (java.lang.String) ref);
20582              realUser_ = b;
20583              return b;
20584            } else {
20585              return (com.google.protobuf.ByteString) ref;
20586            }
20587          }
20588    
20589          // optional uint64 issueDate = 5;
20590          public static final int ISSUEDATE_FIELD_NUMBER = 5;
20591          private long issueDate_;
20592          /**
20593           * <code>optional uint64 issueDate = 5;</code>
20594           */
20595          public boolean hasIssueDate() {
20596            return ((bitField0_ & 0x00000010) == 0x00000010);
20597          }
20598          /**
20599           * <code>optional uint64 issueDate = 5;</code>
20600           */
20601          public long getIssueDate() {
20602            return issueDate_;
20603          }
20604    
20605          // optional uint64 maxDate = 6;
20606          public static final int MAXDATE_FIELD_NUMBER = 6;
20607          private long maxDate_;
20608          /**
20609           * <code>optional uint64 maxDate = 6;</code>
20610           */
20611          public boolean hasMaxDate() {
20612            return ((bitField0_ & 0x00000020) == 0x00000020);
20613          }
20614          /**
20615           * <code>optional uint64 maxDate = 6;</code>
20616           */
20617          public long getMaxDate() {
20618            return maxDate_;
20619          }
20620    
20621          // optional uint32 sequenceNumber = 7;
20622          public static final int SEQUENCENUMBER_FIELD_NUMBER = 7;
20623          private int sequenceNumber_;
20624          /**
20625           * <code>optional uint32 sequenceNumber = 7;</code>
20626           */
20627          public boolean hasSequenceNumber() {
20628            return ((bitField0_ & 0x00000040) == 0x00000040);
20629          }
20630          /**
20631           * <code>optional uint32 sequenceNumber = 7;</code>
20632           */
20633          public int getSequenceNumber() {
20634            return sequenceNumber_;
20635          }
20636    
20637          // optional uint32 masterKeyId = 8;
20638          public static final int MASTERKEYID_FIELD_NUMBER = 8;
20639          private int masterKeyId_;
20640          /**
20641           * <code>optional uint32 masterKeyId = 8;</code>
20642           */
20643          public boolean hasMasterKeyId() {
20644            return ((bitField0_ & 0x00000080) == 0x00000080);
20645          }
20646          /**
20647           * <code>optional uint32 masterKeyId = 8;</code>
20648           */
20649          public int getMasterKeyId() {
20650            return masterKeyId_;
20651          }
20652    
20653          // optional uint64 expiryDate = 9;
20654          public static final int EXPIRYDATE_FIELD_NUMBER = 9;
20655          private long expiryDate_;
20656          /**
20657           * <code>optional uint64 expiryDate = 9;</code>
20658           */
20659          public boolean hasExpiryDate() {
20660            return ((bitField0_ & 0x00000100) == 0x00000100);
20661          }
20662          /**
20663           * <code>optional uint64 expiryDate = 9;</code>
20664           */
20665          public long getExpiryDate() {
20666            return expiryDate_;
20667          }
20668    
      // Resets every field to its protobuf default value (0 for numeric
      // fields, empty string for string fields). Called by the constructor.
      private void initFields() {
        version_ = 0;
        owner_ = "";
        renewer_ = "";
        realUser_ = "";
        issueDate_ = 0L;
        maxDate_ = 0L;
        sequenceNumber_ = 0;
        masterKeyId_ = 0;
        expiryDate_ = 0L;
      }
      // Memoized isInitialized result: -1 = not yet computed, 0 = false, 1 = true.
      private byte memoizedIsInitialized = -1;
      public final boolean isInitialized() {
        byte isInitialized = memoizedIsInitialized;
        if (isInitialized != -1) return isInitialized == 1;

        // Every field of PersistToken is optional, so any instance is
        // always initialized.
        memoizedIsInitialized = 1;
        return true;
      }
20688    
      // Serializes all set fields to the stream in ascending field-number
      // order; unset optional fields are omitted per the protobuf wire format.
      public void writeTo(com.google.protobuf.CodedOutputStream output)
                          throws java.io.IOException {
        // Forces the memoized serialized-size computation before writing.
        getSerializedSize();
        if (((bitField0_ & 0x00000001) == 0x00000001)) {
          output.writeUInt32(1, version_);
        }
        if (((bitField0_ & 0x00000002) == 0x00000002)) {
          output.writeBytes(2, getOwnerBytes());
        }
        if (((bitField0_ & 0x00000004) == 0x00000004)) {
          output.writeBytes(3, getRenewerBytes());
        }
        if (((bitField0_ & 0x00000008) == 0x00000008)) {
          output.writeBytes(4, getRealUserBytes());
        }
        if (((bitField0_ & 0x00000010) == 0x00000010)) {
          output.writeUInt64(5, issueDate_);
        }
        if (((bitField0_ & 0x00000020) == 0x00000020)) {
          output.writeUInt64(6, maxDate_);
        }
        if (((bitField0_ & 0x00000040) == 0x00000040)) {
          output.writeUInt32(7, sequenceNumber_);
        }
        if (((bitField0_ & 0x00000080) == 0x00000080)) {
          output.writeUInt32(8, masterKeyId_);
        }
        if (((bitField0_ & 0x00000100) == 0x00000100)) {
          output.writeUInt64(9, expiryDate_);
        }
        // Preserve any fields that were unknown at parse time.
        getUnknownFields().writeTo(output);
      }
20721    
      // Cached wire size of this (immutable) message; -1 until first computed.
      private int memoizedSerializedSize = -1;
      // Computes (once) and returns the number of bytes writeTo() will emit:
      // the sum of each set field's encoded size plus any unknown fields.
      public int getSerializedSize() {
        int size = memoizedSerializedSize;
        if (size != -1) return size;

        size = 0;
        if (((bitField0_ & 0x00000001) == 0x00000001)) {
          size += com.google.protobuf.CodedOutputStream
            .computeUInt32Size(1, version_);
        }
        if (((bitField0_ & 0x00000002) == 0x00000002)) {
          size += com.google.protobuf.CodedOutputStream
            .computeBytesSize(2, getOwnerBytes());
        }
        if (((bitField0_ & 0x00000004) == 0x00000004)) {
          size += com.google.protobuf.CodedOutputStream
            .computeBytesSize(3, getRenewerBytes());
        }
        if (((bitField0_ & 0x00000008) == 0x00000008)) {
          size += com.google.protobuf.CodedOutputStream
            .computeBytesSize(4, getRealUserBytes());
        }
        if (((bitField0_ & 0x00000010) == 0x00000010)) {
          size += com.google.protobuf.CodedOutputStream
            .computeUInt64Size(5, issueDate_);
        }
        if (((bitField0_ & 0x00000020) == 0x00000020)) {
          size += com.google.protobuf.CodedOutputStream
            .computeUInt64Size(6, maxDate_);
        }
        if (((bitField0_ & 0x00000040) == 0x00000040)) {
          size += com.google.protobuf.CodedOutputStream
            .computeUInt32Size(7, sequenceNumber_);
        }
        if (((bitField0_ & 0x00000080) == 0x00000080)) {
          size += com.google.protobuf.CodedOutputStream
            .computeUInt32Size(8, masterKeyId_);
        }
        if (((bitField0_ & 0x00000100) == 0x00000100)) {
          size += com.google.protobuf.CodedOutputStream
            .computeUInt64Size(9, expiryDate_);
        }
        size += getUnknownFields().getSerializedSize();
        memoizedSerializedSize = size;
        return size;
      }
20768    
      private static final long serialVersionUID = 0L;
      // Java serialization hook; delegates to the GeneratedMessage
      // implementation so messages serialize via their protobuf form.
      @java.lang.Override
      protected java.lang.Object writeReplace()
          throws java.io.ObjectStreamException {
        return super.writeReplace();
      }
20775    
      // Static parse entry points for the supported input sources
      // (ByteString, byte[], InputStream, CodedInputStream, with and without
      // an extension registry). All delegate to the PARSER singleton.
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.PersistToken parseFrom(
          com.google.protobuf.ByteString data)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.PersistToken parseFrom(
          com.google.protobuf.ByteString data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.PersistToken parseFrom(byte[] data)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.PersistToken parseFrom(
          byte[] data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.PersistToken parseFrom(java.io.InputStream input)
          throws java.io.IOException {
        return PARSER.parseFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.PersistToken parseFrom(
          java.io.InputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseFrom(input, extensionRegistry);
      }
      // Delimited variants read a varint length prefix before the message,
      // allowing multiple messages on one stream.
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.PersistToken parseDelimitedFrom(java.io.InputStream input)
          throws java.io.IOException {
        return PARSER.parseDelimitedFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.PersistToken parseDelimitedFrom(
          java.io.InputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseDelimitedFrom(input, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.PersistToken parseFrom(
          com.google.protobuf.CodedInputStream input)
          throws java.io.IOException {
        return PARSER.parseFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.PersistToken parseFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseFrom(input, extensionRegistry);
      }
20828    
      // Builder factories: create an empty builder, or one pre-populated
      // from an existing message via mergeFrom.
      public static Builder newBuilder() { return Builder.create(); }
      public Builder newBuilderForType() { return newBuilder(); }
      public static Builder newBuilder(org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.PersistToken prototype) {
        return newBuilder().mergeFrom(prototype);
      }
      public Builder toBuilder() { return newBuilder(this); }

      // Creates a builder attached to a parent, so nested-builder changes
      // propagate invalidation notifications upward.
      @java.lang.Override
      protected Builder newBuilderForType(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        Builder builder = new Builder(parent);
        return builder;
      }
20842          /**
20843           * Protobuf type {@code hadoop.hdfs.fsimage.SecretManagerSection.PersistToken}
20844           */
20845          public static final class Builder extends
20846              com.google.protobuf.GeneratedMessage.Builder<Builder>
20847             implements org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.PersistTokenOrBuilder {
        // Returns the protobuf descriptor for SecretManagerSection.PersistToken.
        public static final com.google.protobuf.Descriptors.Descriptor
            getDescriptor() {
          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SecretManagerSection_PersistToken_descriptor;
        }

        // Supplies the reflection table mapping descriptor fields to the
        // generated accessor methods of the message and builder classes.
        protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
            internalGetFieldAccessorTable() {
          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SecretManagerSection_PersistToken_fieldAccessorTable
              .ensureFieldAccessorsInitialized(
                  org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.PersistToken.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.PersistToken.Builder.class);
        }
20859    
        // Construct using org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.PersistToken.newBuilder()
        private Builder() {
          maybeForceBuilderInitialization();
        }

        // Parent-aware constructor used for nested-builder invalidation.
        private Builder(
            com.google.protobuf.GeneratedMessage.BuilderParent parent) {
          super(parent);
          maybeForceBuilderInitialization();
        }
        // Eagerly initializes sub-message field builders when the runtime
        // requires it; PersistToken has no message fields, so this is a no-op.
        private void maybeForceBuilderInitialization() {
          if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          }
        }
        // Package-internal factory used by PersistToken.newBuilder().
        private static Builder create() {
          return new Builder();
        }
20877    
        // Resets every field to its default value and clears all presence bits.
        public Builder clear() {
          super.clear();
          version_ = 0;
          bitField0_ = (bitField0_ & ~0x00000001);
          owner_ = "";
          bitField0_ = (bitField0_ & ~0x00000002);
          renewer_ = "";
          bitField0_ = (bitField0_ & ~0x00000004);
          realUser_ = "";
          bitField0_ = (bitField0_ & ~0x00000008);
          issueDate_ = 0L;
          bitField0_ = (bitField0_ & ~0x00000010);
          maxDate_ = 0L;
          bitField0_ = (bitField0_ & ~0x00000020);
          sequenceNumber_ = 0;
          bitField0_ = (bitField0_ & ~0x00000040);
          masterKeyId_ = 0;
          bitField0_ = (bitField0_ & ~0x00000080);
          expiryDate_ = 0L;
          bitField0_ = (bitField0_ & ~0x00000100);
          return this;
        }

        // Deep copy: snapshots the current state into a message and merges
        // it into a fresh builder.
        public Builder clone() {
          return create().mergeFrom(buildPartial());
        }
20904    
        public com.google.protobuf.Descriptors.Descriptor
            getDescriptorForType() {
          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SecretManagerSection_PersistToken_descriptor;
        }

        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.PersistToken getDefaultInstanceForType() {
          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.PersistToken.getDefaultInstance();
        }

        // Builds the message, throwing if any required field is unset
        // (PersistToken has none, so isInitialized() is always true).
        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.PersistToken build() {
          org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.PersistToken result = buildPartial();
          if (!result.isInitialized()) {
            throw newUninitializedMessageException(result);
          }
          return result;
        }
20921    
        // Copies the builder state into a new immutable message without
        // checking initialization. Field values are copied unconditionally;
        // the presence bits are translated one-by-one into the message's
        // bitField0_ so hasXxx() reflects only explicitly-set fields.
        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.PersistToken buildPartial() {
          org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.PersistToken result = new org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.PersistToken(this);
          int from_bitField0_ = bitField0_;
          int to_bitField0_ = 0;
          if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
            to_bitField0_ |= 0x00000001;
          }
          result.version_ = version_;
          if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
            to_bitField0_ |= 0x00000002;
          }
          result.owner_ = owner_;
          if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
            to_bitField0_ |= 0x00000004;
          }
          result.renewer_ = renewer_;
          if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
            to_bitField0_ |= 0x00000008;
          }
          result.realUser_ = realUser_;
          if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
            to_bitField0_ |= 0x00000010;
          }
          result.issueDate_ = issueDate_;
          if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
            to_bitField0_ |= 0x00000020;
          }
          result.maxDate_ = maxDate_;
          if (((from_bitField0_ & 0x00000040) == 0x00000040)) {
            to_bitField0_ |= 0x00000040;
          }
          result.sequenceNumber_ = sequenceNumber_;
          if (((from_bitField0_ & 0x00000080) == 0x00000080)) {
            to_bitField0_ |= 0x00000080;
          }
          result.masterKeyId_ = masterKeyId_;
          if (((from_bitField0_ & 0x00000100) == 0x00000100)) {
            to_bitField0_ |= 0x00000100;
          }
          result.expiryDate_ = expiryDate_;
          result.bitField0_ = to_bitField0_;
          onBuilt();
          return result;
        }
20966    
        // Generic merge: dispatches to the typed overload when possible,
        // otherwise falls back to reflective field-by-field merging.
        public Builder mergeFrom(com.google.protobuf.Message other) {
          if (other instanceof org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.PersistToken) {
            return mergeFrom((org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.PersistToken)other);
          } else {
            super.mergeFrom(other);
            return this;
          }
        }

        // Typed merge: copies only the fields that are set on `other`.
        // String fields copy the internal String/ByteString holder directly
        // (setting the presence bit inline) to avoid an eager UTF-8 decode.
        public Builder mergeFrom(org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.PersistToken other) {
          if (other == org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.PersistToken.getDefaultInstance()) return this;
          if (other.hasVersion()) {
            setVersion(other.getVersion());
          }
          if (other.hasOwner()) {
            bitField0_ |= 0x00000002;
            owner_ = other.owner_;
            onChanged();
          }
          if (other.hasRenewer()) {
            bitField0_ |= 0x00000004;
            renewer_ = other.renewer_;
            onChanged();
          }
          if (other.hasRealUser()) {
            bitField0_ |= 0x00000008;
            realUser_ = other.realUser_;
            onChanged();
          }
          if (other.hasIssueDate()) {
            setIssueDate(other.getIssueDate());
          }
          if (other.hasMaxDate()) {
            setMaxDate(other.getMaxDate());
          }
          if (other.hasSequenceNumber()) {
            setSequenceNumber(other.getSequenceNumber());
          }
          if (other.hasMasterKeyId()) {
            setMasterKeyId(other.getMasterKeyId());
          }
          if (other.hasExpiryDate()) {
            setExpiryDate(other.getExpiryDate());
          }
          this.mergeUnknownFields(other.getUnknownFields());
          return this;
        }
21014    
        // All PersistToken fields are optional, so a builder is always
        // initialized.
        public final boolean isInitialized() {
          return true;
        }

        // Parses a message from the stream and merges it into this builder.
        // On a parse failure, any fields decoded before the error are still
        // merged (via the finally block) before the exception propagates.
        public Builder mergeFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws java.io.IOException {
          org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.PersistToken parsedMessage = null;
          try {
            parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            parsedMessage = (org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.PersistToken) e.getUnfinishedMessage();
            throw e;
          } finally {
            if (parsedMessage != null) {
              mergeFrom(parsedMessage);
            }
          }
          return this;
        }
        // Presence bitmask for the builder's fields; mirrors the message's
        // bitField0_ layout (bit i-1 for field number i).
        private int bitField0_;
21037    
        // optional uint32 version = 1;
        private int version_ ;
        /**
         * <code>optional uint32 version = 1;</code>
         */
        public boolean hasVersion() {
          return ((bitField0_ & 0x00000001) == 0x00000001);
        }
        /**
         * <code>optional uint32 version = 1;</code>
         */
        public int getVersion() {
          return version_;
        }
        /**
         * <code>optional uint32 version = 1;</code>
         */
        public Builder setVersion(int value) {
          bitField0_ |= 0x00000001;
          version_ = value;
          onChanged();
          return this;
        }
        /**
         * <code>optional uint32 version = 1;</code>
         */
        public Builder clearVersion() {
          // Drop the presence bit and restore the proto default (0).
          bitField0_ = (bitField0_ & ~0x00000001);
          version_ = 0;
          onChanged();
          return this;
        }
21070    
        // optional string owner = 2;
        // Holds either a java.lang.String or a ByteString; converted lazily
        // by the accessors below.
        private java.lang.Object owner_ = "";
        /**
         * <code>optional string owner = 2;</code>
         */
        public boolean hasOwner() {
          return ((bitField0_ & 0x00000002) == 0x00000002);
        }
        /**
         * <code>optional string owner = 2;</code>
         */
        public java.lang.String getOwner() {
          java.lang.Object ref = owner_;
          if (!(ref instanceof java.lang.String)) {
            // Builder variant caches the decoded form unconditionally
            // (no isValidUtf8 guard, unlike the message accessor).
            java.lang.String s = ((com.google.protobuf.ByteString) ref)
                .toStringUtf8();
            owner_ = s;
            return s;
          } else {
            return (java.lang.String) ref;
          }
        }
        /**
         * <code>optional string owner = 2;</code>
         */
        public com.google.protobuf.ByteString
            getOwnerBytes() {
          java.lang.Object ref = owner_;
          if (ref instanceof String) {
            // Convert (and cache) the String form to UTF-8 bytes.
            com.google.protobuf.ByteString b = 
                com.google.protobuf.ByteString.copyFromUtf8(
                    (java.lang.String) ref);
            owner_ = b;
            return b;
          } else {
            return (com.google.protobuf.ByteString) ref;
          }
        }
        /**
         * <code>optional string owner = 2;</code>
         */
        public Builder setOwner(
            java.lang.String value) {
          if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000002;
          owner_ = value;
          onChanged();
          return this;
        }
        /**
         * <code>optional string owner = 2;</code>
         */
        public Builder clearOwner() {
          bitField0_ = (bitField0_ & ~0x00000002);
          owner_ = getDefaultInstance().getOwner();
          onChanged();
          return this;
        }
        /**
         * <code>optional string owner = 2;</code>
         */
        public Builder setOwnerBytes(
            com.google.protobuf.ByteString value) {
          if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000002;
          owner_ = value;
          onChanged();
          return this;
        }
21144    
        // optional string renewer = 3;
        // String-or-ByteString holder; same lazy conversion as owner_.
        private java.lang.Object renewer_ = "";
        /**
         * <code>optional string renewer = 3;</code>
         */
        public boolean hasRenewer() {
          return ((bitField0_ & 0x00000004) == 0x00000004);
        }
        /**
         * <code>optional string renewer = 3;</code>
         */
        public java.lang.String getRenewer() {
          java.lang.Object ref = renewer_;
          if (!(ref instanceof java.lang.String)) {
            java.lang.String s = ((com.google.protobuf.ByteString) ref)
                .toStringUtf8();
            renewer_ = s;
            return s;
          } else {
            return (java.lang.String) ref;
          }
        }
        /**
         * <code>optional string renewer = 3;</code>
         */
        public com.google.protobuf.ByteString
            getRenewerBytes() {
          java.lang.Object ref = renewer_;
          if (ref instanceof String) {
            com.google.protobuf.ByteString b = 
                com.google.protobuf.ByteString.copyFromUtf8(
                    (java.lang.String) ref);
            renewer_ = b;
            return b;
          } else {
            return (com.google.protobuf.ByteString) ref;
          }
        }
        /**
         * <code>optional string renewer = 3;</code>
         */
        public Builder setRenewer(
            java.lang.String value) {
          if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000004;
          renewer_ = value;
          onChanged();
          return this;
        }
        /**
         * <code>optional string renewer = 3;</code>
         */
        public Builder clearRenewer() {
          bitField0_ = (bitField0_ & ~0x00000004);
          renewer_ = getDefaultInstance().getRenewer();
          onChanged();
          return this;
        }
        /**
         * <code>optional string renewer = 3;</code>
         */
        public Builder setRenewerBytes(
            com.google.protobuf.ByteString value) {
          if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000004;
          renewer_ = value;
          onChanged();
          return this;
        }
21218    
21219            // optional string realUser = 4;
21220            private java.lang.Object realUser_ = "";
21221            /**
21222             * <code>optional string realUser = 4;</code>
21223             */
21224            public boolean hasRealUser() {
21225              return ((bitField0_ & 0x00000008) == 0x00000008);
21226            }
21227            /**
21228             * <code>optional string realUser = 4;</code>
21229             */
21230            public java.lang.String getRealUser() {
21231              java.lang.Object ref = realUser_;
21232              if (!(ref instanceof java.lang.String)) {
21233                java.lang.String s = ((com.google.protobuf.ByteString) ref)
21234                    .toStringUtf8();
21235                realUser_ = s;
21236                return s;
21237              } else {
21238                return (java.lang.String) ref;
21239              }
21240            }
21241            /**
21242             * <code>optional string realUser = 4;</code>
21243             */
21244            public com.google.protobuf.ByteString
21245                getRealUserBytes() {
21246              java.lang.Object ref = realUser_;
21247              if (ref instanceof String) {
21248                com.google.protobuf.ByteString b = 
21249                    com.google.protobuf.ByteString.copyFromUtf8(
21250                        (java.lang.String) ref);
21251                realUser_ = b;
21252                return b;
21253              } else {
21254                return (com.google.protobuf.ByteString) ref;
21255              }
21256            }
21257            /**
21258             * <code>optional string realUser = 4;</code>
21259             */
21260            public Builder setRealUser(
21261                java.lang.String value) {
21262              if (value == null) {
21263        throw new NullPointerException();
21264      }
21265      bitField0_ |= 0x00000008;
21266              realUser_ = value;
21267              onChanged();
21268              return this;
21269            }
21270            /**
21271             * <code>optional string realUser = 4;</code>
21272             */
21273            public Builder clearRealUser() {
21274              bitField0_ = (bitField0_ & ~0x00000008);
21275              realUser_ = getDefaultInstance().getRealUser();
21276              onChanged();
21277              return this;
21278            }
21279            /**
21280             * <code>optional string realUser = 4;</code>
21281             */
21282            public Builder setRealUserBytes(
21283                com.google.protobuf.ByteString value) {
21284              if (value == null) {
21285        throw new NullPointerException();
21286      }
21287      bitField0_ |= 0x00000008;
21288              realUser_ = value;
21289              onChanged();
21290              return this;
21291            }
21292    
21293            // optional uint64 issueDate = 5;
21294            private long issueDate_ ;
21295            /**
21296             * <code>optional uint64 issueDate = 5;</code>
21297             */
21298            public boolean hasIssueDate() {
21299              return ((bitField0_ & 0x00000010) == 0x00000010);
21300            }
21301            /**
21302             * <code>optional uint64 issueDate = 5;</code>
21303             */
21304            public long getIssueDate() {
21305              return issueDate_;
21306            }
21307            /**
21308             * <code>optional uint64 issueDate = 5;</code>
21309             */
21310            public Builder setIssueDate(long value) {
21311              bitField0_ |= 0x00000010;
21312              issueDate_ = value;
21313              onChanged();
21314              return this;
21315            }
21316            /**
21317             * <code>optional uint64 issueDate = 5;</code>
21318             */
21319            public Builder clearIssueDate() {
21320              bitField0_ = (bitField0_ & ~0x00000010);
21321              issueDate_ = 0L;
21322              onChanged();
21323              return this;
21324            }
21325    
21326            // optional uint64 maxDate = 6;
21327            private long maxDate_ ;
21328            /**
21329             * <code>optional uint64 maxDate = 6;</code>
21330             */
21331            public boolean hasMaxDate() {
21332              return ((bitField0_ & 0x00000020) == 0x00000020);
21333            }
21334            /**
21335             * <code>optional uint64 maxDate = 6;</code>
21336             */
21337            public long getMaxDate() {
21338              return maxDate_;
21339            }
21340            /**
21341             * <code>optional uint64 maxDate = 6;</code>
21342             */
21343            public Builder setMaxDate(long value) {
21344              bitField0_ |= 0x00000020;
21345              maxDate_ = value;
21346              onChanged();
21347              return this;
21348            }
21349            /**
21350             * <code>optional uint64 maxDate = 6;</code>
21351             */
21352            public Builder clearMaxDate() {
21353              bitField0_ = (bitField0_ & ~0x00000020);
21354              maxDate_ = 0L;
21355              onChanged();
21356              return this;
21357            }
21358    
        // optional uint32 sequenceNumber = 7;
        private int sequenceNumber_ ;
        /**
         * <code>optional uint32 sequenceNumber = 7;</code>
         *
         * Presence is tracked by bit 0x40 of bitField0_.
         */
        public boolean hasSequenceNumber() {
          return ((bitField0_ & 0x00000040) == 0x00000040);
        }
        /**
         * <code>optional uint32 sequenceNumber = 7;</code>
         */
        public int getSequenceNumber() {
          return sequenceNumber_;
        }
        /**
         * <code>optional uint32 sequenceNumber = 7;</code>
         *
         * Sets the value and marks the field present.
         */
        public Builder setSequenceNumber(int value) {
          bitField0_ |= 0x00000040;
          sequenceNumber_ = value;
          onChanged();
          return this;
        }
        /**
         * <code>optional uint32 sequenceNumber = 7;</code>
         *
         * Clears the presence bit and resets the value to 0.
         */
        public Builder clearSequenceNumber() {
          bitField0_ = (bitField0_ & ~0x00000040);
          sequenceNumber_ = 0;
          onChanged();
          return this;
        }
21391    
        // optional uint32 masterKeyId = 8;
        private int masterKeyId_ ;
        /**
         * <code>optional uint32 masterKeyId = 8;</code>
         *
         * Presence is tracked by bit 0x80 of bitField0_.
         */
        public boolean hasMasterKeyId() {
          return ((bitField0_ & 0x00000080) == 0x00000080);
        }
        /**
         * <code>optional uint32 masterKeyId = 8;</code>
         */
        public int getMasterKeyId() {
          return masterKeyId_;
        }
        /**
         * <code>optional uint32 masterKeyId = 8;</code>
         *
         * Sets the value and marks the field present.
         */
        public Builder setMasterKeyId(int value) {
          bitField0_ |= 0x00000080;
          masterKeyId_ = value;
          onChanged();
          return this;
        }
        /**
         * <code>optional uint32 masterKeyId = 8;</code>
         *
         * Clears the presence bit and resets the value to 0.
         */
        public Builder clearMasterKeyId() {
          bitField0_ = (bitField0_ & ~0x00000080);
          masterKeyId_ = 0;
          onChanged();
          return this;
        }
21424    
        // optional uint64 expiryDate = 9;
        private long expiryDate_ ;
        /**
         * <code>optional uint64 expiryDate = 9;</code>
         *
         * Presence is tracked by bit 0x100 of bitField0_.
         */
        public boolean hasExpiryDate() {
          return ((bitField0_ & 0x00000100) == 0x00000100);
        }
        /**
         * <code>optional uint64 expiryDate = 9;</code>
         */
        public long getExpiryDate() {
          return expiryDate_;
        }
        /**
         * <code>optional uint64 expiryDate = 9;</code>
         *
         * Sets the value and marks the field present.
         */
        public Builder setExpiryDate(long value) {
          bitField0_ |= 0x00000100;
          expiryDate_ = value;
          onChanged();
          return this;
        }
        /**
         * <code>optional uint64 expiryDate = 9;</code>
         *
         * Clears the presence bit and resets the value to 0.
         */
        public Builder clearExpiryDate() {
          bitField0_ = (bitField0_ & ~0x00000100);
          expiryDate_ = 0L;
          onChanged();
          return this;
        }
21457    
21458            // @@protoc_insertion_point(builder_scope:hadoop.hdfs.fsimage.SecretManagerSection.PersistToken)
21459          }
21460    
      static {
        // Eagerly create the shared singleton default instance for
        // PersistToken; initFields() puts all fields at their defaults.
        defaultInstance = new PersistToken(true);
        defaultInstance.initFields();
      }
21465    
21466          // @@protoc_insertion_point(class_scope:hadoop.hdfs.fsimage.SecretManagerSection.PersistToken)
21467        }
21468    
    // Presence bits for the optional fields below (one bit per field).
    private int bitField0_;
    // optional uint32 currentId = 1;
    public static final int CURRENTID_FIELD_NUMBER = 1;
    private int currentId_;
    /**
     * <code>optional uint32 currentId = 1;</code>
     *
     * Presence is tracked by bit 0x01 of bitField0_.
     */
    public boolean hasCurrentId() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>optional uint32 currentId = 1;</code>
     */
    public int getCurrentId() {
      return currentId_;
    }

    // optional uint32 tokenSequenceNumber = 2;
    public static final int TOKENSEQUENCENUMBER_FIELD_NUMBER = 2;
    private int tokenSequenceNumber_;
    /**
     * <code>optional uint32 tokenSequenceNumber = 2;</code>
     *
     * Presence is tracked by bit 0x02 of bitField0_.
     */
    public boolean hasTokenSequenceNumber() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>optional uint32 tokenSequenceNumber = 2;</code>
     */
    public int getTokenSequenceNumber() {
      return tokenSequenceNumber_;
    }

    // optional uint32 numKeys = 3;
    public static final int NUMKEYS_FIELD_NUMBER = 3;
    private int numKeys_;
    /**
     * <code>optional uint32 numKeys = 3;</code>
     *
     * Presence is tracked by bit 0x04 of bitField0_.
     */
    public boolean hasNumKeys() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    /**
     * <code>optional uint32 numKeys = 3;</code>
     */
    public int getNumKeys() {
      return numKeys_;
    }
21517    
    // optional uint32 numTokens = 4;
    public static final int NUMTOKENS_FIELD_NUMBER = 4;
    private int numTokens_;
    /**
     * <code>optional uint32 numTokens = 4;</code>
     *
     * <pre>
     * repeated DelegationKey keys
     * repeated PersistToken tokens
     * </pre>
     *
     * Presence is tracked by bit 0x08 of bitField0_.
     */
    public boolean hasNumTokens() {
      return ((bitField0_ & 0x00000008) == 0x00000008);
    }
    /**
     * <code>optional uint32 numTokens = 4;</code>
     *
     * <pre>
     * repeated DelegationKey keys
     * repeated PersistToken tokens
     * </pre>
     */
    public int getNumTokens() {
      return numTokens_;
    }
21543    
    // Resets every field to its proto default value.
    private void initFields() {
      currentId_ = 0;
      tokenSequenceNumber_ = 0;
      numKeys_ = 0;
      numTokens_ = 0;
    }
    // -1 = not yet computed, 0 = not initialized, 1 = initialized.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      // All fields of this message are optional, so it is always
      // initialized; memoize the result.
      memoizedIsInitialized = 1;
      return true;
    }
21558    
    // Serializes the message to the output stream, emitting only the
    // fields whose presence bit is set, in field-number order.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      // Forces the serialized size to be memoized before writing.
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeUInt32(1, currentId_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeUInt32(2, tokenSequenceNumber_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeUInt32(3, numKeys_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        output.writeUInt32(4, numTokens_);
      }
      getUnknownFields().writeTo(output);
    }
21576    
    // -1 means the size has not been computed yet.
    private int memoizedSerializedSize = -1;
    // Computes (and memoizes) the byte size of the wire encoding,
    // counting only fields whose presence bit is set plus any
    // unknown fields carried through from parsing.
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt32Size(1, currentId_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt32Size(2, tokenSequenceNumber_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt32Size(3, numKeys_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt32Size(4, numTokens_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
21603    
    private static final long serialVersionUID = 0L;
    // Java serialization hook; delegates to the superclass so the
    // message is serialized via its protobuf encoding.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
21610    
    // Static parsing entry points; every overload delegates to PARSER.
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    // Delimited variants read a leading varint length before the message.
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
21663    
    // Builder factory methods.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    // Returns a builder pre-populated with the given message's fields.
    public static Builder newBuilder(org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
21677        /**
21678         * Protobuf type {@code hadoop.hdfs.fsimage.SecretManagerSection}
21679         */
21680        public static final class Builder extends
21681            com.google.protobuf.GeneratedMessage.Builder<Builder>
21682           implements org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSectionOrBuilder {
21683          public static final com.google.protobuf.Descriptors.Descriptor
21684              getDescriptor() {
21685            return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SecretManagerSection_descriptor;
21686          }
21687    
21688          protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
21689              internalGetFieldAccessorTable() {
21690            return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SecretManagerSection_fieldAccessorTable
21691                .ensureFieldAccessorsInitialized(
21692                    org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.Builder.class);
21693          }
21694    
21695          // Construct using org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.newBuilder()
21696          private Builder() {
21697            maybeForceBuilderInitialization();
21698          }
21699    
21700          private Builder(
21701              com.google.protobuf.GeneratedMessage.BuilderParent parent) {
21702            super(parent);
21703            maybeForceBuilderInitialization();
21704          }
21705          private void maybeForceBuilderInitialization() {
21706            if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
21707            }
21708          }
21709          private static Builder create() {
21710            return new Builder();
21711          }
21712    
21713          public Builder clear() {
21714            super.clear();
21715            currentId_ = 0;
21716            bitField0_ = (bitField0_ & ~0x00000001);
21717            tokenSequenceNumber_ = 0;
21718            bitField0_ = (bitField0_ & ~0x00000002);
21719            numKeys_ = 0;
21720            bitField0_ = (bitField0_ & ~0x00000004);
21721            numTokens_ = 0;
21722            bitField0_ = (bitField0_ & ~0x00000008);
21723            return this;
21724          }
21725    
21726          public Builder clone() {
21727            return create().mergeFrom(buildPartial());
21728          }
21729    
21730          public com.google.protobuf.Descriptors.Descriptor
21731              getDescriptorForType() {
21732            return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SecretManagerSection_descriptor;
21733          }
21734    
21735          public org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection getDefaultInstanceForType() {
21736            return org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.getDefaultInstance();
21737          }
21738    
21739          public org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection build() {
21740            org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection result = buildPartial();
21741            if (!result.isInitialized()) {
21742              throw newUninitializedMessageException(result);
21743            }
21744            return result;
21745          }
21746    
21747          public org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection buildPartial() {
21748            org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection result = new org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection(this);
21749            int from_bitField0_ = bitField0_;
21750            int to_bitField0_ = 0;
21751            if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
21752              to_bitField0_ |= 0x00000001;
21753            }
21754            result.currentId_ = currentId_;
21755            if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
21756              to_bitField0_ |= 0x00000002;
21757            }
21758            result.tokenSequenceNumber_ = tokenSequenceNumber_;
21759            if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
21760              to_bitField0_ |= 0x00000004;
21761            }
21762            result.numKeys_ = numKeys_;
21763            if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
21764              to_bitField0_ |= 0x00000008;
21765            }
21766            result.numTokens_ = numTokens_;
21767            result.bitField0_ = to_bitField0_;
21768            onBuilt();
21769            return result;
21770          }
21771    
21772          public Builder mergeFrom(com.google.protobuf.Message other) {
21773            if (other instanceof org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection) {
21774              return mergeFrom((org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection)other);
21775            } else {
21776              super.mergeFrom(other);
21777              return this;
21778            }
21779          }
21780    
21781          public Builder mergeFrom(org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection other) {
21782            if (other == org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.getDefaultInstance()) return this;
21783            if (other.hasCurrentId()) {
21784              setCurrentId(other.getCurrentId());
21785            }
21786            if (other.hasTokenSequenceNumber()) {
21787              setTokenSequenceNumber(other.getTokenSequenceNumber());
21788            }
21789            if (other.hasNumKeys()) {
21790              setNumKeys(other.getNumKeys());
21791            }
21792            if (other.hasNumTokens()) {
21793              setNumTokens(other.getNumTokens());
21794            }
21795            this.mergeUnknownFields(other.getUnknownFields());
21796            return this;
21797          }
21798    
21799          public final boolean isInitialized() {
21800            return true;
21801          }
21802    
21803          public Builder mergeFrom(
21804              com.google.protobuf.CodedInputStream input,
21805              com.google.protobuf.ExtensionRegistryLite extensionRegistry)
21806              throws java.io.IOException {
21807            org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection parsedMessage = null;
21808            try {
21809              parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
21810            } catch (com.google.protobuf.InvalidProtocolBufferException e) {
21811              parsedMessage = (org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection) e.getUnfinishedMessage();
21812              throw e;
21813            } finally {
21814              if (parsedMessage != null) {
21815                mergeFrom(parsedMessage);
21816              }
21817            }
21818            return this;
21819          }
21820          private int bitField0_;
21821    
21822          // optional uint32 currentId = 1;
21823          private int currentId_ ;
21824          /**
21825           * <code>optional uint32 currentId = 1;</code>
21826           */
21827          public boolean hasCurrentId() {
21828            return ((bitField0_ & 0x00000001) == 0x00000001);
21829          }
21830          /**
21831           * <code>optional uint32 currentId = 1;</code>
21832           */
21833          public int getCurrentId() {
21834            return currentId_;
21835          }
21836          /**
21837           * <code>optional uint32 currentId = 1;</code>
21838           */
21839          public Builder setCurrentId(int value) {
21840            bitField0_ |= 0x00000001;
21841            currentId_ = value;
21842            onChanged();
21843            return this;
21844          }
21845          /**
21846           * <code>optional uint32 currentId = 1;</code>
21847           */
21848          public Builder clearCurrentId() {
21849            bitField0_ = (bitField0_ & ~0x00000001);
21850            currentId_ = 0;
21851            onChanged();
21852            return this;
21853          }
21854    
21855          // optional uint32 tokenSequenceNumber = 2;
21856          private int tokenSequenceNumber_ ;
21857          /**
21858           * <code>optional uint32 tokenSequenceNumber = 2;</code>
21859           */
21860          public boolean hasTokenSequenceNumber() {
21861            return ((bitField0_ & 0x00000002) == 0x00000002);
21862          }
21863          /**
21864           * <code>optional uint32 tokenSequenceNumber = 2;</code>
21865           */
21866          public int getTokenSequenceNumber() {
21867            return tokenSequenceNumber_;
21868          }
21869          /**
21870           * <code>optional uint32 tokenSequenceNumber = 2;</code>
21871           */
21872          public Builder setTokenSequenceNumber(int value) {
21873            bitField0_ |= 0x00000002;
21874            tokenSequenceNumber_ = value;
21875            onChanged();
21876            return this;
21877          }
21878          /**
21879           * <code>optional uint32 tokenSequenceNumber = 2;</code>
21880           */
21881          public Builder clearTokenSequenceNumber() {
21882            bitField0_ = (bitField0_ & ~0x00000002);
21883            tokenSequenceNumber_ = 0;
21884            onChanged();
21885            return this;
21886          }
21887    
21888          // optional uint32 numKeys = 3;
21889          private int numKeys_ ;
21890          /**
21891           * <code>optional uint32 numKeys = 3;</code>
21892           */
21893          public boolean hasNumKeys() {
21894            return ((bitField0_ & 0x00000004) == 0x00000004);
21895          }
21896          /**
21897           * <code>optional uint32 numKeys = 3;</code>
21898           */
21899          public int getNumKeys() {
21900            return numKeys_;
21901          }
21902          /**
21903           * <code>optional uint32 numKeys = 3;</code>
21904           */
21905          public Builder setNumKeys(int value) {
21906            bitField0_ |= 0x00000004;
21907            numKeys_ = value;
21908            onChanged();
21909            return this;
21910          }
21911          /**
21912           * <code>optional uint32 numKeys = 3;</code>
21913           */
21914          public Builder clearNumKeys() {
21915            bitField0_ = (bitField0_ & ~0x00000004);
21916            numKeys_ = 0;
21917            onChanged();
21918            return this;
21919          }
21920    
21921          // optional uint32 numTokens = 4;
21922          private int numTokens_ ;
21923          /**
21924           * <code>optional uint32 numTokens = 4;</code>
21925           *
21926           * <pre>
21927           * repeated DelegationKey keys
21928           * repeated PersistToken tokens
21929           * </pre>
21930           */
21931          public boolean hasNumTokens() {
21932            return ((bitField0_ & 0x00000008) == 0x00000008);
21933          }
21934          /**
21935           * <code>optional uint32 numTokens = 4;</code>
21936           *
21937           * <pre>
21938           * repeated DelegationKey keys
21939           * repeated PersistToken tokens
21940           * </pre>
21941           */
21942          public int getNumTokens() {
21943            return numTokens_;
21944          }
21945          /**
21946           * <code>optional uint32 numTokens = 4;</code>
21947           *
21948           * <pre>
21949           * repeated DelegationKey keys
21950           * repeated PersistToken tokens
21951           * </pre>
21952           */
21953          public Builder setNumTokens(int value) {
21954            bitField0_ |= 0x00000008;
21955            numTokens_ = value;
21956            onChanged();
21957            return this;
21958          }
21959          /**
21960           * <code>optional uint32 numTokens = 4;</code>
21961           *
21962           * <pre>
21963           * repeated DelegationKey keys
21964           * repeated PersistToken tokens
21965           * </pre>
21966           */
21967          public Builder clearNumTokens() {
21968            bitField0_ = (bitField0_ & ~0x00000008);
21969            numTokens_ = 0;
21970            onChanged();
21971            return this;
21972          }
21973    
21974          // @@protoc_insertion_point(builder_scope:hadoop.hdfs.fsimage.SecretManagerSection)
21975        }
21976    
    // Eagerly builds the shared default instance at class-load time; the
    // no-init constructor defers field setup to the explicit initFields() call.
    static {
      defaultInstance = new SecretManagerSection(true);
      defaultInstance.initFields();
    }
21981    
21982        // @@protoc_insertion_point(class_scope:hadoop.hdfs.fsimage.SecretManagerSection)
21983      }
21984    
  /**
   * Read-side contract implemented by both {@code CacheManagerSection} and its
   * {@code Builder}: presence checks (has*) and getters for the section's
   * three required fields.
   */
  public interface CacheManagerSectionOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required uint64 nextDirectiveId = 1;
    /**
     * <code>required uint64 nextDirectiveId = 1;</code>
     *
     * @return whether nextDirectiveId has been set.
     */
    boolean hasNextDirectiveId();
    /**
     * <code>required uint64 nextDirectiveId = 1;</code>
     *
     * @return the value of nextDirectiveId.
     */
    long getNextDirectiveId();

    // required uint32 numPools = 2;
    /**
     * <code>required uint32 numPools = 2;</code>
     *
     * @return whether numPools has been set.
     */
    boolean hasNumPools();
    /**
     * <code>required uint32 numPools = 2;</code>
     *
     * @return the value of numPools.
     */
    int getNumPools();

    // required uint32 numDirectives = 3;
    /**
     * <code>required uint32 numDirectives = 3;</code>
     *
     * <pre>
     * repeated CachePoolInfoProto pools
     * repeated CacheDirectiveInfoProto directives
     * </pre>
     *
     * @return whether numDirectives has been set.
     */
    boolean hasNumDirectives();
    /**
     * <code>required uint32 numDirectives = 3;</code>
     *
     * <pre>
     * repeated CachePoolInfoProto pools
     * repeated CacheDirectiveInfoProto directives
     * </pre>
     *
     * @return the value of numDirectives.
     */
    int getNumDirectives();
  }
22028      /**
22029       * Protobuf type {@code hadoop.hdfs.fsimage.CacheManagerSection}
22030       */
22031      public static final class CacheManagerSection extends
22032          com.google.protobuf.GeneratedMessage
22033          implements CacheManagerSectionOrBuilder {
22034        // Use CacheManagerSection.newBuilder() to construct.
22035        private CacheManagerSection(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
22036          super(builder);
22037          this.unknownFields = builder.getUnknownFields();
22038        }
22039        private CacheManagerSection(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
22040    
22041        private static final CacheManagerSection defaultInstance;
22042        public static CacheManagerSection getDefaultInstance() {
22043          return defaultInstance;
22044        }
22045    
22046        public CacheManagerSection getDefaultInstanceForType() {
22047          return defaultInstance;
22048        }
22049    
22050        private final com.google.protobuf.UnknownFieldSet unknownFields;
22051        @java.lang.Override
22052        public final com.google.protobuf.UnknownFieldSet
22053            getUnknownFields() {
22054          return this.unknownFields;
22055        }
22056        private CacheManagerSection(
22057            com.google.protobuf.CodedInputStream input,
22058            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
22059            throws com.google.protobuf.InvalidProtocolBufferException {
22060          initFields();
22061          int mutable_bitField0_ = 0;
22062          com.google.protobuf.UnknownFieldSet.Builder unknownFields =
22063              com.google.protobuf.UnknownFieldSet.newBuilder();
22064          try {
22065            boolean done = false;
22066            while (!done) {
22067              int tag = input.readTag();
22068              switch (tag) {
22069                case 0:
22070                  done = true;
22071                  break;
22072                default: {
22073                  if (!parseUnknownField(input, unknownFields,
22074                                         extensionRegistry, tag)) {
22075                    done = true;
22076                  }
22077                  break;
22078                }
22079                case 8: {
22080                  bitField0_ |= 0x00000001;
22081                  nextDirectiveId_ = input.readUInt64();
22082                  break;
22083                }
22084                case 16: {
22085                  bitField0_ |= 0x00000002;
22086                  numPools_ = input.readUInt32();
22087                  break;
22088                }
22089                case 24: {
22090                  bitField0_ |= 0x00000004;
22091                  numDirectives_ = input.readUInt32();
22092                  break;
22093                }
22094              }
22095            }
22096          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
22097            throw e.setUnfinishedMessage(this);
22098          } catch (java.io.IOException e) {
22099            throw new com.google.protobuf.InvalidProtocolBufferException(
22100                e.getMessage()).setUnfinishedMessage(this);
22101          } finally {
22102            this.unknownFields = unknownFields.build();
22103            makeExtensionsImmutable();
22104          }
22105        }
22106        public static final com.google.protobuf.Descriptors.Descriptor
22107            getDescriptor() {
22108          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_CacheManagerSection_descriptor;
22109        }
22110    
22111        protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
22112            internalGetFieldAccessorTable() {
22113          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_CacheManagerSection_fieldAccessorTable
22114              .ensureFieldAccessorsInitialized(
22115                  org.apache.hadoop.hdfs.server.namenode.FsImageProto.CacheManagerSection.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.CacheManagerSection.Builder.class);
22116        }
22117    
22118        public static com.google.protobuf.Parser<CacheManagerSection> PARSER =
22119            new com.google.protobuf.AbstractParser<CacheManagerSection>() {
22120          public CacheManagerSection parsePartialFrom(
22121              com.google.protobuf.CodedInputStream input,
22122              com.google.protobuf.ExtensionRegistryLite extensionRegistry)
22123              throws com.google.protobuf.InvalidProtocolBufferException {
22124            return new CacheManagerSection(input, extensionRegistry);
22125          }
22126        };
22127    
22128        @java.lang.Override
22129        public com.google.protobuf.Parser<CacheManagerSection> getParserForType() {
22130          return PARSER;
22131        }
22132    
22133        private int bitField0_;
22134        // required uint64 nextDirectiveId = 1;
22135        public static final int NEXTDIRECTIVEID_FIELD_NUMBER = 1;
22136        private long nextDirectiveId_;
22137        /**
22138         * <code>required uint64 nextDirectiveId = 1;</code>
22139         */
22140        public boolean hasNextDirectiveId() {
22141          return ((bitField0_ & 0x00000001) == 0x00000001);
22142        }
22143        /**
22144         * <code>required uint64 nextDirectiveId = 1;</code>
22145         */
22146        public long getNextDirectiveId() {
22147          return nextDirectiveId_;
22148        }
22149    
22150        // required uint32 numPools = 2;
22151        public static final int NUMPOOLS_FIELD_NUMBER = 2;
22152        private int numPools_;
22153        /**
22154         * <code>required uint32 numPools = 2;</code>
22155         */
22156        public boolean hasNumPools() {
22157          return ((bitField0_ & 0x00000002) == 0x00000002);
22158        }
22159        /**
22160         * <code>required uint32 numPools = 2;</code>
22161         */
22162        public int getNumPools() {
22163          return numPools_;
22164        }
22165    
22166        // required uint32 numDirectives = 3;
22167        public static final int NUMDIRECTIVES_FIELD_NUMBER = 3;
22168        private int numDirectives_;
22169        /**
22170         * <code>required uint32 numDirectives = 3;</code>
22171         *
22172         * <pre>
22173         * repeated CachePoolInfoProto pools
22174         * repeated CacheDirectiveInfoProto directives
22175         * </pre>
22176         */
22177        public boolean hasNumDirectives() {
22178          return ((bitField0_ & 0x00000004) == 0x00000004);
22179        }
22180        /**
22181         * <code>required uint32 numDirectives = 3;</code>
22182         *
22183         * <pre>
22184         * repeated CachePoolInfoProto pools
22185         * repeated CacheDirectiveInfoProto directives
22186         * </pre>
22187         */
22188        public int getNumDirectives() {
22189          return numDirectives_;
22190        }
22191    
22192        private void initFields() {
22193          nextDirectiveId_ = 0L;
22194          numPools_ = 0;
22195          numDirectives_ = 0;
22196        }
22197        private byte memoizedIsInitialized = -1;
22198        public final boolean isInitialized() {
22199          byte isInitialized = memoizedIsInitialized;
22200          if (isInitialized != -1) return isInitialized == 1;
22201    
22202          if (!hasNextDirectiveId()) {
22203            memoizedIsInitialized = 0;
22204            return false;
22205          }
22206          if (!hasNumPools()) {
22207            memoizedIsInitialized = 0;
22208            return false;
22209          }
22210          if (!hasNumDirectives()) {
22211            memoizedIsInitialized = 0;
22212            return false;
22213          }
22214          memoizedIsInitialized = 1;
22215          return true;
22216        }
22217    
22218        public void writeTo(com.google.protobuf.CodedOutputStream output)
22219                            throws java.io.IOException {
22220          getSerializedSize();
22221          if (((bitField0_ & 0x00000001) == 0x00000001)) {
22222            output.writeUInt64(1, nextDirectiveId_);
22223          }
22224          if (((bitField0_ & 0x00000002) == 0x00000002)) {
22225            output.writeUInt32(2, numPools_);
22226          }
22227          if (((bitField0_ & 0x00000004) == 0x00000004)) {
22228            output.writeUInt32(3, numDirectives_);
22229          }
22230          getUnknownFields().writeTo(output);
22231        }
22232    
22233        private int memoizedSerializedSize = -1;
22234        public int getSerializedSize() {
22235          int size = memoizedSerializedSize;
22236          if (size != -1) return size;
22237    
22238          size = 0;
22239          if (((bitField0_ & 0x00000001) == 0x00000001)) {
22240            size += com.google.protobuf.CodedOutputStream
22241              .computeUInt64Size(1, nextDirectiveId_);
22242          }
22243          if (((bitField0_ & 0x00000002) == 0x00000002)) {
22244            size += com.google.protobuf.CodedOutputStream
22245              .computeUInt32Size(2, numPools_);
22246          }
22247          if (((bitField0_ & 0x00000004) == 0x00000004)) {
22248            size += com.google.protobuf.CodedOutputStream
22249              .computeUInt32Size(3, numDirectives_);
22250          }
22251          size += getUnknownFields().getSerializedSize();
22252          memoizedSerializedSize = size;
22253          return size;
22254        }
22255    
22256        private static final long serialVersionUID = 0L;
22257        @java.lang.Override
22258        protected java.lang.Object writeReplace()
22259            throws java.io.ObjectStreamException {
22260          return super.writeReplace();
22261        }
22262    
22263        public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.CacheManagerSection parseFrom(
22264            com.google.protobuf.ByteString data)
22265            throws com.google.protobuf.InvalidProtocolBufferException {
22266          return PARSER.parseFrom(data);
22267        }
22268        public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.CacheManagerSection parseFrom(
22269            com.google.protobuf.ByteString data,
22270            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
22271            throws com.google.protobuf.InvalidProtocolBufferException {
22272          return PARSER.parseFrom(data, extensionRegistry);
22273        }
22274        public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.CacheManagerSection parseFrom(byte[] data)
22275            throws com.google.protobuf.InvalidProtocolBufferException {
22276          return PARSER.parseFrom(data);
22277        }
22278        public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.CacheManagerSection parseFrom(
22279            byte[] data,
22280            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
22281            throws com.google.protobuf.InvalidProtocolBufferException {
22282          return PARSER.parseFrom(data, extensionRegistry);
22283        }
22284        public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.CacheManagerSection parseFrom(java.io.InputStream input)
22285            throws java.io.IOException {
22286          return PARSER.parseFrom(input);
22287        }
22288        public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.CacheManagerSection parseFrom(
22289            java.io.InputStream input,
22290            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
22291            throws java.io.IOException {
22292          return PARSER.parseFrom(input, extensionRegistry);
22293        }
22294        public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.CacheManagerSection parseDelimitedFrom(java.io.InputStream input)
22295            throws java.io.IOException {
22296          return PARSER.parseDelimitedFrom(input);
22297        }
22298        public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.CacheManagerSection parseDelimitedFrom(
22299            java.io.InputStream input,
22300            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
22301            throws java.io.IOException {
22302          return PARSER.parseDelimitedFrom(input, extensionRegistry);
22303        }
22304        public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.CacheManagerSection parseFrom(
22305            com.google.protobuf.CodedInputStream input)
22306            throws java.io.IOException {
22307          return PARSER.parseFrom(input);
22308        }
22309        public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.CacheManagerSection parseFrom(
22310            com.google.protobuf.CodedInputStream input,
22311            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
22312            throws java.io.IOException {
22313          return PARSER.parseFrom(input, extensionRegistry);
22314        }
22315    
22316        public static Builder newBuilder() { return Builder.create(); }
22317        public Builder newBuilderForType() { return newBuilder(); }
22318        public static Builder newBuilder(org.apache.hadoop.hdfs.server.namenode.FsImageProto.CacheManagerSection prototype) {
22319          return newBuilder().mergeFrom(prototype);
22320        }
22321        public Builder toBuilder() { return newBuilder(this); }
22322    
22323        @java.lang.Override
22324        protected Builder newBuilderForType(
22325            com.google.protobuf.GeneratedMessage.BuilderParent parent) {
22326          Builder builder = new Builder(parent);
22327          return builder;
22328        }
22329        /**
22330         * Protobuf type {@code hadoop.hdfs.fsimage.CacheManagerSection}
22331         */
22332        public static final class Builder extends
22333            com.google.protobuf.GeneratedMessage.Builder<Builder>
22334           implements org.apache.hadoop.hdfs.server.namenode.FsImageProto.CacheManagerSectionOrBuilder {
22335          public static final com.google.protobuf.Descriptors.Descriptor
22336              getDescriptor() {
22337            return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_CacheManagerSection_descriptor;
22338          }
22339    
22340          protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
22341              internalGetFieldAccessorTable() {
22342            return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_CacheManagerSection_fieldAccessorTable
22343                .ensureFieldAccessorsInitialized(
22344                    org.apache.hadoop.hdfs.server.namenode.FsImageProto.CacheManagerSection.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.CacheManagerSection.Builder.class);
22345          }
22346    
22347          // Construct using org.apache.hadoop.hdfs.server.namenode.FsImageProto.CacheManagerSection.newBuilder()
22348          private Builder() {
22349            maybeForceBuilderInitialization();
22350          }
22351    
22352          private Builder(
22353              com.google.protobuf.GeneratedMessage.BuilderParent parent) {
22354            super(parent);
22355            maybeForceBuilderInitialization();
22356          }
22357          private void maybeForceBuilderInitialization() {
22358            if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
22359            }
22360          }
22361          private static Builder create() {
22362            return new Builder();
22363          }
22364    
22365          public Builder clear() {
22366            super.clear();
22367            nextDirectiveId_ = 0L;
22368            bitField0_ = (bitField0_ & ~0x00000001);
22369            numPools_ = 0;
22370            bitField0_ = (bitField0_ & ~0x00000002);
22371            numDirectives_ = 0;
22372            bitField0_ = (bitField0_ & ~0x00000004);
22373            return this;
22374          }
22375    
22376          public Builder clone() {
22377            return create().mergeFrom(buildPartial());
22378          }
22379    
22380          public com.google.protobuf.Descriptors.Descriptor
22381              getDescriptorForType() {
22382            return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_CacheManagerSection_descriptor;
22383          }
22384    
22385          public org.apache.hadoop.hdfs.server.namenode.FsImageProto.CacheManagerSection getDefaultInstanceForType() {
22386            return org.apache.hadoop.hdfs.server.namenode.FsImageProto.CacheManagerSection.getDefaultInstance();
22387          }
22388    
22389          public org.apache.hadoop.hdfs.server.namenode.FsImageProto.CacheManagerSection build() {
22390            org.apache.hadoop.hdfs.server.namenode.FsImageProto.CacheManagerSection result = buildPartial();
22391            if (!result.isInitialized()) {
22392              throw newUninitializedMessageException(result);
22393            }
22394            return result;
22395          }
22396    
22397          public org.apache.hadoop.hdfs.server.namenode.FsImageProto.CacheManagerSection buildPartial() {
22398            org.apache.hadoop.hdfs.server.namenode.FsImageProto.CacheManagerSection result = new org.apache.hadoop.hdfs.server.namenode.FsImageProto.CacheManagerSection(this);
22399            int from_bitField0_ = bitField0_;
22400            int to_bitField0_ = 0;
22401            if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
22402              to_bitField0_ |= 0x00000001;
22403            }
22404            result.nextDirectiveId_ = nextDirectiveId_;
22405            if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
22406              to_bitField0_ |= 0x00000002;
22407            }
22408            result.numPools_ = numPools_;
22409            if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
22410              to_bitField0_ |= 0x00000004;
22411            }
22412            result.numDirectives_ = numDirectives_;
22413            result.bitField0_ = to_bitField0_;
22414            onBuilt();
22415            return result;
22416          }
22417    
22418          public Builder mergeFrom(com.google.protobuf.Message other) {
22419            if (other instanceof org.apache.hadoop.hdfs.server.namenode.FsImageProto.CacheManagerSection) {
22420              return mergeFrom((org.apache.hadoop.hdfs.server.namenode.FsImageProto.CacheManagerSection)other);
22421            } else {
22422              super.mergeFrom(other);
22423              return this;
22424            }
22425          }
22426    
22427          public Builder mergeFrom(org.apache.hadoop.hdfs.server.namenode.FsImageProto.CacheManagerSection other) {
22428            if (other == org.apache.hadoop.hdfs.server.namenode.FsImageProto.CacheManagerSection.getDefaultInstance()) return this;
22429            if (other.hasNextDirectiveId()) {
22430              setNextDirectiveId(other.getNextDirectiveId());
22431            }
22432            if (other.hasNumPools()) {
22433              setNumPools(other.getNumPools());
22434            }
22435            if (other.hasNumDirectives()) {
22436              setNumDirectives(other.getNumDirectives());
22437            }
22438            this.mergeUnknownFields(other.getUnknownFields());
22439            return this;
22440          }
22441    
22442          public final boolean isInitialized() {
22443            if (!hasNextDirectiveId()) {
22444              
22445              return false;
22446            }
22447            if (!hasNumPools()) {
22448              
22449              return false;
22450            }
22451            if (!hasNumDirectives()) {
22452              
22453              return false;
22454            }
22455            return true;
22456          }
22457    
22458          public Builder mergeFrom(
22459              com.google.protobuf.CodedInputStream input,
22460              com.google.protobuf.ExtensionRegistryLite extensionRegistry)
22461              throws java.io.IOException {
22462            org.apache.hadoop.hdfs.server.namenode.FsImageProto.CacheManagerSection parsedMessage = null;
22463            try {
22464              parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
22465            } catch (com.google.protobuf.InvalidProtocolBufferException e) {
22466              parsedMessage = (org.apache.hadoop.hdfs.server.namenode.FsImageProto.CacheManagerSection) e.getUnfinishedMessage();
22467              throw e;
22468            } finally {
22469              if (parsedMessage != null) {
22470                mergeFrom(parsedMessage);
22471              }
22472            }
22473            return this;
22474          }
22475          private int bitField0_;
22476    
22477          // required uint64 nextDirectiveId = 1;
22478          private long nextDirectiveId_ ;
22479          /**
22480           * <code>required uint64 nextDirectiveId = 1;</code>
22481           */
22482          public boolean hasNextDirectiveId() {
22483            return ((bitField0_ & 0x00000001) == 0x00000001);
22484          }
22485          /**
22486           * <code>required uint64 nextDirectiveId = 1;</code>
22487           */
22488          public long getNextDirectiveId() {
22489            return nextDirectiveId_;
22490          }
22491          /**
22492           * <code>required uint64 nextDirectiveId = 1;</code>
22493           */
22494          public Builder setNextDirectiveId(long value) {
22495            bitField0_ |= 0x00000001;
22496            nextDirectiveId_ = value;
22497            onChanged();
22498            return this;
22499          }
22500          /**
22501           * <code>required uint64 nextDirectiveId = 1;</code>
22502           */
22503          public Builder clearNextDirectiveId() {
22504            bitField0_ = (bitField0_ & ~0x00000001);
22505            nextDirectiveId_ = 0L;
22506            onChanged();
22507            return this;
22508          }
22509    
22510          // required uint32 numPools = 2;
22511          private int numPools_ ;
22512          /**
22513           * <code>required uint32 numPools = 2;</code>
22514           */
22515          public boolean hasNumPools() {
22516            return ((bitField0_ & 0x00000002) == 0x00000002);
22517          }
22518          /**
22519           * <code>required uint32 numPools = 2;</code>
22520           */
22521          public int getNumPools() {
22522            return numPools_;
22523          }
22524          /**
22525           * <code>required uint32 numPools = 2;</code>
22526           */
22527          public Builder setNumPools(int value) {
22528            bitField0_ |= 0x00000002;
22529            numPools_ = value;
22530            onChanged();
22531            return this;
22532          }
22533          /**
22534           * <code>required uint32 numPools = 2;</code>
22535           */
22536          public Builder clearNumPools() {
22537            bitField0_ = (bitField0_ & ~0x00000002);
22538            numPools_ = 0;
22539            onChanged();
22540            return this;
22541          }
22542    
22543          // required uint32 numDirectives = 3;
22544          private int numDirectives_ ;
22545          /**
22546           * <code>required uint32 numDirectives = 3;</code>
22547           *
22548           * <pre>
22549           * repeated CachePoolInfoProto pools
22550           * repeated CacheDirectiveInfoProto directives
22551           * </pre>
22552           */
22553          public boolean hasNumDirectives() {
22554            return ((bitField0_ & 0x00000004) == 0x00000004);
22555          }
22556          /**
22557           * <code>required uint32 numDirectives = 3;</code>
22558           *
22559           * <pre>
22560           * repeated CachePoolInfoProto pools
22561           * repeated CacheDirectiveInfoProto directives
22562           * </pre>
22563           */
22564          public int getNumDirectives() {
22565            return numDirectives_;
22566          }
22567          /**
22568           * <code>required uint32 numDirectives = 3;</code>
22569           *
22570           * <pre>
22571           * repeated CachePoolInfoProto pools
22572           * repeated CacheDirectiveInfoProto directives
22573           * </pre>
22574           */
22575          public Builder setNumDirectives(int value) {
22576            bitField0_ |= 0x00000004;
22577            numDirectives_ = value;
22578            onChanged();
22579            return this;
22580          }
22581          /**
22582           * <code>required uint32 numDirectives = 3;</code>
22583           *
22584           * <pre>
22585           * repeated CachePoolInfoProto pools
22586           * repeated CacheDirectiveInfoProto directives
22587           * </pre>
22588           */
22589          public Builder clearNumDirectives() {
22590            bitField0_ = (bitField0_ & ~0x00000004);
22591            numDirectives_ = 0;
22592            onChanged();
22593            return this;
22594          }
22595    
22596          // @@protoc_insertion_point(builder_scope:hadoop.hdfs.fsimage.CacheManagerSection)
22597        }
22598    
22599        static {
22600          defaultInstance = new CacheManagerSection(true);
22601          defaultInstance.initFields();
22602        }
22603    
22604        // @@protoc_insertion_point(class_scope:hadoop.hdfs.fsimage.CacheManagerSection)
22605      }
22606    
  // Descriptor and reflective field-accessor-table handles for each generated
  // message type in this file; assigned when the file descriptor is built.
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_hdfs_fsimage_FileSummary_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_hdfs_fsimage_FileSummary_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_hdfs_fsimage_FileSummary_Section_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_hdfs_fsimage_FileSummary_Section_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_hdfs_fsimage_NameSystemSection_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_hdfs_fsimage_NameSystemSection_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_hdfs_fsimage_INodeSection_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_hdfs_fsimage_INodeSection_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_hdfs_fsimage_INodeSection_FileUnderConstructionFeature_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_hdfs_fsimage_INodeSection_FileUnderConstructionFeature_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_hdfs_fsimage_INodeSection_AclFeatureProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_hdfs_fsimage_INodeSection_AclFeatureProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_hdfs_fsimage_INodeSection_XAttrCompactProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_hdfs_fsimage_INodeSection_XAttrCompactProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_hdfs_fsimage_INodeSection_XAttrFeatureProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_hdfs_fsimage_INodeSection_XAttrFeatureProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_hdfs_fsimage_INodeSection_INodeFile_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_hdfs_fsimage_INodeSection_INodeFile_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_hdfs_fsimage_INodeSection_INodeDirectory_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_hdfs_fsimage_INodeSection_INodeDirectory_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_hdfs_fsimage_INodeSection_INodeSymlink_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_hdfs_fsimage_INodeSection_INodeSymlink_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_hdfs_fsimage_INodeSection_INode_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_hdfs_fsimage_INodeSection_INode_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_hdfs_fsimage_FilesUnderConstructionSection_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_hdfs_fsimage_FilesUnderConstructionSection_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_hdfs_fsimage_FilesUnderConstructionSection_FileUnderConstructionEntry_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_hdfs_fsimage_FilesUnderConstructionSection_FileUnderConstructionEntry_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_hdfs_fsimage_INodeDirectorySection_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_hdfs_fsimage_INodeDirectorySection_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_hdfs_fsimage_INodeDirectorySection_DirEntry_descriptor;
22684      private static
22685        com.google.protobuf.GeneratedMessage.FieldAccessorTable
22686          internal_static_hadoop_hdfs_fsimage_INodeDirectorySection_DirEntry_fieldAccessorTable;
22687      private static com.google.protobuf.Descriptors.Descriptor
22688        internal_static_hadoop_hdfs_fsimage_INodeReferenceSection_descriptor;
22689      private static
22690        com.google.protobuf.GeneratedMessage.FieldAccessorTable
22691          internal_static_hadoop_hdfs_fsimage_INodeReferenceSection_fieldAccessorTable;
22692      private static com.google.protobuf.Descriptors.Descriptor
22693        internal_static_hadoop_hdfs_fsimage_INodeReferenceSection_INodeReference_descriptor;
22694      private static
22695        com.google.protobuf.GeneratedMessage.FieldAccessorTable
22696          internal_static_hadoop_hdfs_fsimage_INodeReferenceSection_INodeReference_fieldAccessorTable;
22697      private static com.google.protobuf.Descriptors.Descriptor
22698        internal_static_hadoop_hdfs_fsimage_SnapshotSection_descriptor;
22699      private static
22700        com.google.protobuf.GeneratedMessage.FieldAccessorTable
22701          internal_static_hadoop_hdfs_fsimage_SnapshotSection_fieldAccessorTable;
22702      private static com.google.protobuf.Descriptors.Descriptor
22703        internal_static_hadoop_hdfs_fsimage_SnapshotSection_Snapshot_descriptor;
22704      private static
22705        com.google.protobuf.GeneratedMessage.FieldAccessorTable
22706          internal_static_hadoop_hdfs_fsimage_SnapshotSection_Snapshot_fieldAccessorTable;
22707      private static com.google.protobuf.Descriptors.Descriptor
22708        internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_descriptor;
22709      private static
22710        com.google.protobuf.GeneratedMessage.FieldAccessorTable
22711          internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_fieldAccessorTable;
22712      private static com.google.protobuf.Descriptors.Descriptor
22713        internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_CreatedListEntry_descriptor;
22714      private static
22715        com.google.protobuf.GeneratedMessage.FieldAccessorTable
22716          internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_CreatedListEntry_fieldAccessorTable;
22717      private static com.google.protobuf.Descriptors.Descriptor
22718        internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_DirectoryDiff_descriptor;
22719      private static
22720        com.google.protobuf.GeneratedMessage.FieldAccessorTable
22721          internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_DirectoryDiff_fieldAccessorTable;
22722      private static com.google.protobuf.Descriptors.Descriptor
22723        internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_FileDiff_descriptor;
22724      private static
22725        com.google.protobuf.GeneratedMessage.FieldAccessorTable
22726          internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_FileDiff_fieldAccessorTable;
22727      private static com.google.protobuf.Descriptors.Descriptor
22728        internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_DiffEntry_descriptor;
22729      private static
22730        com.google.protobuf.GeneratedMessage.FieldAccessorTable
22731          internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_DiffEntry_fieldAccessorTable;
22732      private static com.google.protobuf.Descriptors.Descriptor
22733        internal_static_hadoop_hdfs_fsimage_StringTableSection_descriptor;
22734      private static
22735        com.google.protobuf.GeneratedMessage.FieldAccessorTable
22736          internal_static_hadoop_hdfs_fsimage_StringTableSection_fieldAccessorTable;
22737      private static com.google.protobuf.Descriptors.Descriptor
22738        internal_static_hadoop_hdfs_fsimage_StringTableSection_Entry_descriptor;
22739      private static
22740        com.google.protobuf.GeneratedMessage.FieldAccessorTable
22741          internal_static_hadoop_hdfs_fsimage_StringTableSection_Entry_fieldAccessorTable;
22742      private static com.google.protobuf.Descriptors.Descriptor
22743        internal_static_hadoop_hdfs_fsimage_SecretManagerSection_descriptor;
22744      private static
22745        com.google.protobuf.GeneratedMessage.FieldAccessorTable
22746          internal_static_hadoop_hdfs_fsimage_SecretManagerSection_fieldAccessorTable;
22747      private static com.google.protobuf.Descriptors.Descriptor
22748        internal_static_hadoop_hdfs_fsimage_SecretManagerSection_DelegationKey_descriptor;
22749      private static
22750        com.google.protobuf.GeneratedMessage.FieldAccessorTable
22751          internal_static_hadoop_hdfs_fsimage_SecretManagerSection_DelegationKey_fieldAccessorTable;
22752      private static com.google.protobuf.Descriptors.Descriptor
22753        internal_static_hadoop_hdfs_fsimage_SecretManagerSection_PersistToken_descriptor;
22754      private static
22755        com.google.protobuf.GeneratedMessage.FieldAccessorTable
22756          internal_static_hadoop_hdfs_fsimage_SecretManagerSection_PersistToken_fieldAccessorTable;
22757      private static com.google.protobuf.Descriptors.Descriptor
22758        internal_static_hadoop_hdfs_fsimage_CacheManagerSection_descriptor;
22759      private static
22760        com.google.protobuf.GeneratedMessage.FieldAccessorTable
22761          internal_static_hadoop_hdfs_fsimage_CacheManagerSection_fieldAccessorTable;
22762    
22763      public static com.google.protobuf.Descriptors.FileDescriptor
22764          getDescriptor() {
22765        return descriptor;
22766      }
  // File-level descriptor for fsimage.proto; assigned exactly once inside the
  // static initializer's InternalDescriptorAssigner callback.
  private static com.google.protobuf.Descriptors.FileDescriptor
      descriptor;
  static {
    // Serialized FileDescriptorProto for fsimage.proto, emitted by protoc as
    // escaped string chunks (Java limits a single string constant to 64KB).
    // The bytes are load-bearing: do not edit them by hand — regenerate from
    // the .proto instead.
    java.lang.String[] descriptorData = {
      "\n\rfsimage.proto\022\023hadoop.hdfs.fsimage\032\nhd" +
      "fs.proto\032\tacl.proto\032\013xattr.proto\"\277\001\n\013Fil" +
      "eSummary\022\025\n\rondiskVersion\030\001 \002(\r\022\025\n\rlayou" +
      "tVersion\030\002 \002(\r\022\r\n\005codec\030\003 \001(\t\022:\n\010section" +
      "s\030\004 \003(\0132(.hadoop.hdfs.fsimage.FileSummar" +
      "y.Section\0327\n\007Section\022\014\n\004name\030\001 \001(\t\022\016\n\006le" +
      "ngth\030\002 \001(\004\022\016\n\006offset\030\003 \001(\004\"\277\001\n\021NameSyste" +
      "mSection\022\023\n\013namespaceId\030\001 \001(\r\022\022\n\ngenstam" +
      "pV1\030\002 \001(\004\022\022\n\ngenstampV2\030\003 \001(\004\022\027\n\017genstam" +
      "pV1Limit\030\004 \001(\004\022\034\n\024lastAllocatedBlockId\030\005",
      " \001(\004\022\025\n\rtransactionId\030\006 \001(\004\022\037\n\027rollingUp" +
      "gradeStartTime\030\007 \001(\004\"\346\n\n\014INodeSection\022\023\n" +
      "\013lastInodeId\030\001 \001(\004\022\021\n\tnumInodes\030\002 \001(\004\032I\n" +
      "\034FileUnderConstructionFeature\022\022\n\nclientN" +
      "ame\030\001 \001(\t\022\025\n\rclientMachine\030\002 \001(\t\032&\n\017AclF" +
      "eatureProto\022\023\n\007entries\030\002 \003(\007B\002\020\001\0320\n\021XAtt" +
      "rCompactProto\022\014\n\004name\030\001 \002(\007\022\r\n\005value\030\002 \001" +
      "(\014\032X\n\021XAttrFeatureProto\022C\n\006xAttrs\030\001 \003(\0132" +
      "3.hadoop.hdfs.fsimage.INodeSection.XAttr" +
      "CompactProto\032\225\003\n\tINodeFile\022\023\n\013replicatio",
      "n\030\001 \001(\r\022\030\n\020modificationTime\030\002 \001(\004\022\022\n\nacc" +
      "essTime\030\003 \001(\004\022\032\n\022preferredBlockSize\030\004 \001(" +
      "\004\022\022\n\npermission\030\005 \001(\006\022\'\n\006blocks\030\006 \003(\0132\027." +
      "hadoop.hdfs.BlockProto\022N\n\006fileUC\030\007 \001(\0132>" +
      ".hadoop.hdfs.fsimage.INodeSection.FileUn" +
      "derConstructionFeature\022>\n\003acl\030\010 \001(\01321.ha" +
      "doop.hdfs.fsimage.INodeSection.AclFeatur" +
      "eProto\022C\n\006xAttrs\030\t \001(\01323.hadoop.hdfs.fsi" +
      "mage.INodeSection.XAttrFeatureProto\022\027\n\017s" +
      "toragePolicyID\030\n \001(\r\032\345\001\n\016INodeDirectory\022",
      "\030\n\020modificationTime\030\001 \001(\004\022\017\n\007nsQuota\030\002 \001" +
      "(\004\022\017\n\007dsQuota\030\003 \001(\004\022\022\n\npermission\030\004 \001(\006\022" +
      ">\n\003acl\030\005 \001(\01321.hadoop.hdfs.fsimage.INode" +
      "Section.AclFeatureProto\022C\n\006xAttrs\030\006 \001(\0132" +
      "3.hadoop.hdfs.fsimage.INodeSection.XAttr" +
      "FeatureProto\032`\n\014INodeSymlink\022\022\n\npermissi" +
      "on\030\001 \001(\006\022\016\n\006target\030\002 \001(\014\022\030\n\020modification" +
      "Time\030\003 \001(\004\022\022\n\naccessTime\030\004 \001(\004\032\314\002\n\005INode" +
      "\022:\n\004type\030\001 \002(\0162,.hadoop.hdfs.fsimage.INo" +
      "deSection.INode.Type\022\n\n\002id\030\002 \002(\004\022\014\n\004name",
      "\030\003 \001(\014\0229\n\004file\030\004 \001(\0132+.hadoop.hdfs.fsima" +
      "ge.INodeSection.INodeFile\022C\n\tdirectory\030\005" +
      " \001(\01320.hadoop.hdfs.fsimage.INodeSection." +
      "INodeDirectory\022?\n\007symlink\030\006 \001(\0132..hadoop" +
      ".hdfs.fsimage.INodeSection.INodeSymlink\"" +
      ",\n\004Type\022\010\n\004FILE\020\001\022\r\n\tDIRECTORY\020\002\022\013\n\007SYML" +
      "INK\020\003\"`\n\035FilesUnderConstructionSection\032?" +
      "\n\032FileUnderConstructionEntry\022\017\n\007inodeId\030" +
      "\001 \001(\004\022\020\n\010fullPath\030\002 \001(\t\"b\n\025INodeDirector" +
      "ySection\032I\n\010DirEntry\022\016\n\006parent\030\001 \001(\004\022\024\n\010",
      "children\030\002 \003(\004B\002\020\001\022\027\n\013refChildren\030\003 \003(\rB" +
      "\002\020\001\"z\n\025INodeReferenceSection\032a\n\016INodeRef" +
      "erence\022\022\n\nreferredId\030\001 \001(\004\022\014\n\004name\030\002 \001(\014" +
      "\022\025\n\rdstSnapshotId\030\003 \001(\r\022\026\n\016lastSnapshotI" +
      "d\030\004 \001(\r\"\265\001\n\017SnapshotSection\022\027\n\017snapshotC" +
      "ounter\030\001 \001(\r\022\034\n\020snapshottableDir\030\002 \003(\004B\002" +
      "\020\001\022\024\n\014numSnapshots\030\003 \001(\r\032U\n\010Snapshot\022\022\n\n" +
      "snapshotId\030\001 \001(\r\0225\n\004root\030\002 \001(\0132\'.hadoop." +
      "hdfs.fsimage.INodeSection.INode\"\327\004\n\023Snap" +
      "shotDiffSection\032 \n\020CreatedListEntry\022\014\n\004n",
      "ame\030\001 \001(\014\032\367\001\n\rDirectoryDiff\022\022\n\nsnapshotI" +
      "d\030\001 \001(\r\022\024\n\014childrenSize\030\002 \001(\r\022\026\n\016isSnaps" +
      "hotRoot\030\003 \001(\010\022\014\n\004name\030\004 \001(\014\022F\n\014snapshotC" +
      "opy\030\005 \001(\01320.hadoop.hdfs.fsimage.INodeSec" +
      "tion.INodeDirectory\022\027\n\017createdListSize\030\006" +
      " \001(\r\022\030\n\014deletedINode\030\007 \003(\004B\002\020\001\022\033\n\017delete" +
      "dINodeRef\030\010 \003(\rB\002\020\001\032\201\001\n\010FileDiff\022\022\n\nsnap" +
      "shotId\030\001 \001(\r\022\020\n\010fileSize\030\002 \001(\004\022\014\n\004name\030\003" +
      " \001(\014\022A\n\014snapshotCopy\030\004 \001(\0132+.hadoop.hdfs" +
      ".fsimage.INodeSection.INodeFile\032\237\001\n\tDiff",
      "Entry\022E\n\004type\030\001 \002(\01627.hadoop.hdfs.fsimag" +
      "e.SnapshotDiffSection.DiffEntry.Type\022\017\n\007" +
      "inodeId\030\002 \001(\004\022\021\n\tnumOfDiff\030\003 \001(\r\"\'\n\004Type" +
      "\022\014\n\010FILEDIFF\020\001\022\021\n\rDIRECTORYDIFF\020\002\"H\n\022Str" +
      "ingTableSection\022\020\n\010numEntry\030\001 \001(\r\032 \n\005Ent" +
      "ry\022\n\n\002id\030\001 \001(\r\022\013\n\003str\030\002 \001(\t\"\341\002\n\024SecretMa" +
      "nagerSection\022\021\n\tcurrentId\030\001 \001(\r\022\033\n\023token" +
      "SequenceNumber\030\002 \001(\r\022\017\n\007numKeys\030\003 \001(\r\022\021\n" +
      "\tnumTokens\030\004 \001(\r\032<\n\rDelegationKey\022\n\n\002id\030" +
      "\001 \001(\r\022\022\n\nexpiryDate\030\002 \001(\004\022\013\n\003key\030\003 \001(\014\032\266",
      "\001\n\014PersistToken\022\017\n\007version\030\001 \001(\r\022\r\n\005owne" +
      "r\030\002 \001(\t\022\017\n\007renewer\030\003 \001(\t\022\020\n\010realUser\030\004 \001" +
      "(\t\022\021\n\tissueDate\030\005 \001(\004\022\017\n\007maxDate\030\006 \001(\004\022\026" +
      "\n\016sequenceNumber\030\007 \001(\r\022\023\n\013masterKeyId\030\010 " +
      "\001(\r\022\022\n\nexpiryDate\030\t \001(\004\"W\n\023CacheManagerS" +
      "ection\022\027\n\017nextDirectiveId\030\001 \002(\004\022\020\n\010numPo" +
      "ols\030\002 \002(\r\022\025\n\rnumDirectives\030\003 \002(\rB6\n&org." +
      "apache.hadoop.hdfs.server.namenodeB\014FsIm" +
      "ageProto"
    };
    // Callback invoked once the FileDescriptor is built; it caches each message
    // descriptor and constructs its FieldAccessorTable. The getMessageTypes()/
    // getNestedTypes() indices and the accessor-name arrays must match the
    // declaration order in fsimage.proto exactly — they are generated together.
    com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
      new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
        public com.google.protobuf.ExtensionRegistry assignDescriptors(
            com.google.protobuf.Descriptors.FileDescriptor root) {
          descriptor = root;
          internal_static_hadoop_hdfs_fsimage_FileSummary_descriptor =
            getDescriptor().getMessageTypes().get(0);
          internal_static_hadoop_hdfs_fsimage_FileSummary_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_hdfs_fsimage_FileSummary_descriptor,
              new java.lang.String[] { "OndiskVersion", "LayoutVersion", "Codec", "Sections", });
          internal_static_hadoop_hdfs_fsimage_FileSummary_Section_descriptor =
            internal_static_hadoop_hdfs_fsimage_FileSummary_descriptor.getNestedTypes().get(0);
          internal_static_hadoop_hdfs_fsimage_FileSummary_Section_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_hdfs_fsimage_FileSummary_Section_descriptor,
              new java.lang.String[] { "Name", "Length", "Offset", });
          internal_static_hadoop_hdfs_fsimage_NameSystemSection_descriptor =
            getDescriptor().getMessageTypes().get(1);
          internal_static_hadoop_hdfs_fsimage_NameSystemSection_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_hdfs_fsimage_NameSystemSection_descriptor,
              new java.lang.String[] { "NamespaceId", "GenstampV1", "GenstampV2", "GenstampV1Limit", "LastAllocatedBlockId", "TransactionId", "RollingUpgradeStartTime", });
          internal_static_hadoop_hdfs_fsimage_INodeSection_descriptor =
            getDescriptor().getMessageTypes().get(2);
          internal_static_hadoop_hdfs_fsimage_INodeSection_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_hdfs_fsimage_INodeSection_descriptor,
              new java.lang.String[] { "LastInodeId", "NumInodes", });
          internal_static_hadoop_hdfs_fsimage_INodeSection_FileUnderConstructionFeature_descriptor =
            internal_static_hadoop_hdfs_fsimage_INodeSection_descriptor.getNestedTypes().get(0);
          internal_static_hadoop_hdfs_fsimage_INodeSection_FileUnderConstructionFeature_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_hdfs_fsimage_INodeSection_FileUnderConstructionFeature_descriptor,
              new java.lang.String[] { "ClientName", "ClientMachine", });
          internal_static_hadoop_hdfs_fsimage_INodeSection_AclFeatureProto_descriptor =
            internal_static_hadoop_hdfs_fsimage_INodeSection_descriptor.getNestedTypes().get(1);
          internal_static_hadoop_hdfs_fsimage_INodeSection_AclFeatureProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_hdfs_fsimage_INodeSection_AclFeatureProto_descriptor,
              new java.lang.String[] { "Entries", });
          internal_static_hadoop_hdfs_fsimage_INodeSection_XAttrCompactProto_descriptor =
            internal_static_hadoop_hdfs_fsimage_INodeSection_descriptor.getNestedTypes().get(2);
          internal_static_hadoop_hdfs_fsimage_INodeSection_XAttrCompactProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_hdfs_fsimage_INodeSection_XAttrCompactProto_descriptor,
              new java.lang.String[] { "Name", "Value", });
          internal_static_hadoop_hdfs_fsimage_INodeSection_XAttrFeatureProto_descriptor =
            internal_static_hadoop_hdfs_fsimage_INodeSection_descriptor.getNestedTypes().get(3);
          internal_static_hadoop_hdfs_fsimage_INodeSection_XAttrFeatureProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_hdfs_fsimage_INodeSection_XAttrFeatureProto_descriptor,
              new java.lang.String[] { "XAttrs", });
          internal_static_hadoop_hdfs_fsimage_INodeSection_INodeFile_descriptor =
            internal_static_hadoop_hdfs_fsimage_INodeSection_descriptor.getNestedTypes().get(4);
          internal_static_hadoop_hdfs_fsimage_INodeSection_INodeFile_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_hdfs_fsimage_INodeSection_INodeFile_descriptor,
              new java.lang.String[] { "Replication", "ModificationTime", "AccessTime", "PreferredBlockSize", "Permission", "Blocks", "FileUC", "Acl", "XAttrs", "StoragePolicyID", });
          internal_static_hadoop_hdfs_fsimage_INodeSection_INodeDirectory_descriptor =
            internal_static_hadoop_hdfs_fsimage_INodeSection_descriptor.getNestedTypes().get(5);
          internal_static_hadoop_hdfs_fsimage_INodeSection_INodeDirectory_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_hdfs_fsimage_INodeSection_INodeDirectory_descriptor,
              new java.lang.String[] { "ModificationTime", "NsQuota", "DsQuota", "Permission", "Acl", "XAttrs", });
          internal_static_hadoop_hdfs_fsimage_INodeSection_INodeSymlink_descriptor =
            internal_static_hadoop_hdfs_fsimage_INodeSection_descriptor.getNestedTypes().get(6);
          internal_static_hadoop_hdfs_fsimage_INodeSection_INodeSymlink_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_hdfs_fsimage_INodeSection_INodeSymlink_descriptor,
              new java.lang.String[] { "Permission", "Target", "ModificationTime", "AccessTime", });
          internal_static_hadoop_hdfs_fsimage_INodeSection_INode_descriptor =
            internal_static_hadoop_hdfs_fsimage_INodeSection_descriptor.getNestedTypes().get(7);
          internal_static_hadoop_hdfs_fsimage_INodeSection_INode_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_hdfs_fsimage_INodeSection_INode_descriptor,
              new java.lang.String[] { "Type", "Id", "Name", "File", "Directory", "Symlink", });
          internal_static_hadoop_hdfs_fsimage_FilesUnderConstructionSection_descriptor =
            getDescriptor().getMessageTypes().get(3);
          internal_static_hadoop_hdfs_fsimage_FilesUnderConstructionSection_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_hdfs_fsimage_FilesUnderConstructionSection_descriptor,
              new java.lang.String[] { });
          internal_static_hadoop_hdfs_fsimage_FilesUnderConstructionSection_FileUnderConstructionEntry_descriptor =
            internal_static_hadoop_hdfs_fsimage_FilesUnderConstructionSection_descriptor.getNestedTypes().get(0);
          internal_static_hadoop_hdfs_fsimage_FilesUnderConstructionSection_FileUnderConstructionEntry_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_hdfs_fsimage_FilesUnderConstructionSection_FileUnderConstructionEntry_descriptor,
              new java.lang.String[] { "InodeId", "FullPath", });
          internal_static_hadoop_hdfs_fsimage_INodeDirectorySection_descriptor =
            getDescriptor().getMessageTypes().get(4);
          internal_static_hadoop_hdfs_fsimage_INodeDirectorySection_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_hdfs_fsimage_INodeDirectorySection_descriptor,
              new java.lang.String[] { });
          internal_static_hadoop_hdfs_fsimage_INodeDirectorySection_DirEntry_descriptor =
            internal_static_hadoop_hdfs_fsimage_INodeDirectorySection_descriptor.getNestedTypes().get(0);
          internal_static_hadoop_hdfs_fsimage_INodeDirectorySection_DirEntry_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_hdfs_fsimage_INodeDirectorySection_DirEntry_descriptor,
              new java.lang.String[] { "Parent", "Children", "RefChildren", });
          internal_static_hadoop_hdfs_fsimage_INodeReferenceSection_descriptor =
            getDescriptor().getMessageTypes().get(5);
          internal_static_hadoop_hdfs_fsimage_INodeReferenceSection_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_hdfs_fsimage_INodeReferenceSection_descriptor,
              new java.lang.String[] { });
          internal_static_hadoop_hdfs_fsimage_INodeReferenceSection_INodeReference_descriptor =
            internal_static_hadoop_hdfs_fsimage_INodeReferenceSection_descriptor.getNestedTypes().get(0);
          internal_static_hadoop_hdfs_fsimage_INodeReferenceSection_INodeReference_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_hdfs_fsimage_INodeReferenceSection_INodeReference_descriptor,
              new java.lang.String[] { "ReferredId", "Name", "DstSnapshotId", "LastSnapshotId", });
          internal_static_hadoop_hdfs_fsimage_SnapshotSection_descriptor =
            getDescriptor().getMessageTypes().get(6);
          internal_static_hadoop_hdfs_fsimage_SnapshotSection_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_hdfs_fsimage_SnapshotSection_descriptor,
              new java.lang.String[] { "SnapshotCounter", "SnapshottableDir", "NumSnapshots", });
          internal_static_hadoop_hdfs_fsimage_SnapshotSection_Snapshot_descriptor =
            internal_static_hadoop_hdfs_fsimage_SnapshotSection_descriptor.getNestedTypes().get(0);
          internal_static_hadoop_hdfs_fsimage_SnapshotSection_Snapshot_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_hdfs_fsimage_SnapshotSection_Snapshot_descriptor,
              new java.lang.String[] { "SnapshotId", "Root", });
          internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_descriptor =
            getDescriptor().getMessageTypes().get(7);
          internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_descriptor,
              new java.lang.String[] { });
          internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_CreatedListEntry_descriptor =
            internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_descriptor.getNestedTypes().get(0);
          internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_CreatedListEntry_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_CreatedListEntry_descriptor,
              new java.lang.String[] { "Name", });
          internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_DirectoryDiff_descriptor =
            internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_descriptor.getNestedTypes().get(1);
          internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_DirectoryDiff_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_DirectoryDiff_descriptor,
              new java.lang.String[] { "SnapshotId", "ChildrenSize", "IsSnapshotRoot", "Name", "SnapshotCopy", "CreatedListSize", "DeletedINode", "DeletedINodeRef", });
          internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_FileDiff_descriptor =
            internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_descriptor.getNestedTypes().get(2);
          internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_FileDiff_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_FileDiff_descriptor,
              new java.lang.String[] { "SnapshotId", "FileSize", "Name", "SnapshotCopy", });
          internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_DiffEntry_descriptor =
            internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_descriptor.getNestedTypes().get(3);
          internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_DiffEntry_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_DiffEntry_descriptor,
              new java.lang.String[] { "Type", "InodeId", "NumOfDiff", });
          internal_static_hadoop_hdfs_fsimage_StringTableSection_descriptor =
            getDescriptor().getMessageTypes().get(8);
          internal_static_hadoop_hdfs_fsimage_StringTableSection_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_hdfs_fsimage_StringTableSection_descriptor,
              new java.lang.String[] { "NumEntry", });
          internal_static_hadoop_hdfs_fsimage_StringTableSection_Entry_descriptor =
            internal_static_hadoop_hdfs_fsimage_StringTableSection_descriptor.getNestedTypes().get(0);
          internal_static_hadoop_hdfs_fsimage_StringTableSection_Entry_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_hdfs_fsimage_StringTableSection_Entry_descriptor,
              new java.lang.String[] { "Id", "Str", });
          internal_static_hadoop_hdfs_fsimage_SecretManagerSection_descriptor =
            getDescriptor().getMessageTypes().get(9);
          internal_static_hadoop_hdfs_fsimage_SecretManagerSection_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_hdfs_fsimage_SecretManagerSection_descriptor,
              new java.lang.String[] { "CurrentId", "TokenSequenceNumber", "NumKeys", "NumTokens", });
          internal_static_hadoop_hdfs_fsimage_SecretManagerSection_DelegationKey_descriptor =
            internal_static_hadoop_hdfs_fsimage_SecretManagerSection_descriptor.getNestedTypes().get(0);
          internal_static_hadoop_hdfs_fsimage_SecretManagerSection_DelegationKey_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_hdfs_fsimage_SecretManagerSection_DelegationKey_descriptor,
              new java.lang.String[] { "Id", "ExpiryDate", "Key", });
          internal_static_hadoop_hdfs_fsimage_SecretManagerSection_PersistToken_descriptor =
            internal_static_hadoop_hdfs_fsimage_SecretManagerSection_descriptor.getNestedTypes().get(1);
          internal_static_hadoop_hdfs_fsimage_SecretManagerSection_PersistToken_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_hdfs_fsimage_SecretManagerSection_PersistToken_descriptor,
              new java.lang.String[] { "Version", "Owner", "Renewer", "RealUser", "IssueDate", "MaxDate", "SequenceNumber", "MasterKeyId", "ExpiryDate", });
          internal_static_hadoop_hdfs_fsimage_CacheManagerSection_descriptor =
            getDescriptor().getMessageTypes().get(10);
          internal_static_hadoop_hdfs_fsimage_CacheManagerSection_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_hdfs_fsimage_CacheManagerSection_descriptor,
              new java.lang.String[] { "NextDirectiveId", "NumPools", "NumDirectives", });
          // No extensions are defined in fsimage.proto, so no registry is returned.
          return null;
        }
      };
    // Build the FileDescriptor now. The dependency array must match the import
    // order in fsimage.proto (hdfs.proto, acl.proto, xattr.proto); the assigner
    // above runs synchronously once the descriptor is cross-linked.
    com.google.protobuf.Descriptors.FileDescriptor
      .internalBuildGeneratedFileFrom(descriptorData,
        new com.google.protobuf.Descriptors.FileDescriptor[] {
          org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.getDescriptor(),
          org.apache.hadoop.hdfs.protocol.proto.AclProtos.getDescriptor(),
          org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.getDescriptor(),
        }, assigner);
  }
23063    
23064      // @@protoc_insertion_point(outer_class_scope)
23065    }