// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: fsimage.proto

package org.apache.hadoop.hdfs.server.namenode;

public final class FsImageProto {
  private FsImageProto() {}
  public static void registerAllExtensions(
      com.google.protobuf.ExtensionRegistry registry) {
  }
  public interface FileSummaryOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required uint32 ondiskVersion = 1;
    /**
     * <code>required uint32 ondiskVersion = 1;</code>
     *
     * <pre>
     * The version of the above EBNF grammars.
     * </pre>
     */
    boolean hasOndiskVersion();
    /**
     * <code>required uint32 ondiskVersion = 1;</code>
     *
     * <pre>
     * The version of the above EBNF grammars.
     * </pre>
     */
    int getOndiskVersion();

    // required uint32 layoutVersion = 2;
    /**
     * <code>required uint32 layoutVersion = 2;</code>
     *
     * <pre>
     * layoutVersion describes which features are available in the
     * FSImage.
     * </pre>
     */
    boolean hasLayoutVersion();
    /**
     * <code>required uint32 layoutVersion = 2;</code>
     *
     * <pre>
     * layoutVersion describes which features are available in the
     * FSImage.
     * </pre>
     */
    int getLayoutVersion();

    // optional string codec = 3;
    /**
     * <code>optional string codec = 3;</code>
     */
    boolean hasCodec();
    /**
     * <code>optional string codec = 3;</code>
     */
    java.lang.String getCodec();
    /**
     * <code>optional string codec = 3;</code>
     */
    com.google.protobuf.ByteString
        getCodecBytes();

    // repeated .hadoop.hdfs.fsimage.FileSummary.Section sections = 4;
    /**
     * <code>repeated .hadoop.hdfs.fsimage.FileSummary.Section sections = 4;</code>
     */
    java.util.List<org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section>
        getSectionsList();
    /**
     * <code>repeated .hadoop.hdfs.fsimage.FileSummary.Section sections = 4;</code>
     */
    org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section getSections(int index);
    /**
     * <code>repeated .hadoop.hdfs.fsimage.FileSummary.Section sections = 4;</code>
     */
    int getSectionsCount();
    /**
     * <code>repeated .hadoop.hdfs.fsimage.FileSummary.Section sections = 4;</code>
     */
    java.util.List<? extends org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.SectionOrBuilder>
        getSectionsOrBuilderList();
    /**
     * <code>repeated .hadoop.hdfs.fsimage.FileSummary.Section sections = 4;</code>
     */
    org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.SectionOrBuilder getSectionsOrBuilder(
        int index);
  }
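  // Illustrative usage (hand-written sketch, not compiler-generated; the local
  // variable names are assumptions). Client code normally consumes this
  // interface through the concrete FileSummary message, e.g. walking the
  // section index after parsing:
  //
  //   FsImageProto.FileSummary summary = ...;  // obtained via parseFrom(...)
  //   for (FsImageProto.FileSummary.Section s : summary.getSectionsList()) {
  //     System.out.println(s.getName() + " offset=" + s.getOffset()
  //         + " length=" + s.getLength());
  //   }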
  /**
   * Protobuf type {@code hadoop.hdfs.fsimage.FileSummary}
   */
  public static final class FileSummary extends
      com.google.protobuf.GeneratedMessage
      implements FileSummaryOrBuilder {
    // Use FileSummary.newBuilder() to construct.
    private FileSummary(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    private FileSummary(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final FileSummary defaultInstance;
    public static FileSummary getDefaultInstance() {
      return defaultInstance;
    }

    public FileSummary getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    private FileSummary(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 8: {
              bitField0_ |= 0x00000001;
              ondiskVersion_ = input.readUInt32();
              break;
            }
            case 16: {
              bitField0_ |= 0x00000002;
              layoutVersion_ = input.readUInt32();
              break;
            }
            case 26: {
              bitField0_ |= 0x00000004;
              codec_ = input.readBytes();
              break;
            }
            case 34: {
              if (!((mutable_bitField0_ & 0x00000008) == 0x00000008)) {
                sections_ = new java.util.ArrayList<org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section>();
                mutable_bitField0_ |= 0x00000008;
              }
              sections_.add(input.readMessage(org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section.PARSER, extensionRegistry));
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        if (((mutable_bitField0_ & 0x00000008) == 0x00000008)) {
          sections_ = java.util.Collections.unmodifiableList(sections_);
        }
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_FileSummary_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_FileSummary_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Builder.class);
    }

    public static com.google.protobuf.Parser<FileSummary> PARSER =
        new com.google.protobuf.AbstractParser<FileSummary>() {
      public FileSummary parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new FileSummary(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<FileSummary> getParserForType() {
      return PARSER;
    }

    public interface SectionOrBuilder
        extends com.google.protobuf.MessageOrBuilder {

      // optional string name = 1;
      /**
       * <code>optional string name = 1;</code>
       */
      boolean hasName();
      /**
       * <code>optional string name = 1;</code>
       */
      java.lang.String getName();
      /**
       * <code>optional string name = 1;</code>
       */
      com.google.protobuf.ByteString
          getNameBytes();

      // optional uint64 length = 2;
      /**
       * <code>optional uint64 length = 2;</code>
       */
      boolean hasLength();
      /**
       * <code>optional uint64 length = 2;</code>
       */
      long getLength();

      // optional uint64 offset = 3;
      /**
       * <code>optional uint64 offset = 3;</code>
       */
      boolean hasOffset();
      /**
       * <code>optional uint64 offset = 3;</code>
       */
      long getOffset();
    }
    /**
     * Protobuf type {@code hadoop.hdfs.fsimage.FileSummary.Section}
     *
     * <pre>
     * index for each section
     * </pre>
     */
    public static final class Section extends
        com.google.protobuf.GeneratedMessage
        implements SectionOrBuilder {
      // Use Section.newBuilder() to construct.
      private Section(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
        super(builder);
        this.unknownFields = builder.getUnknownFields();
      }
      private Section(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

      private static final Section defaultInstance;
      public static Section getDefaultInstance() {
        return defaultInstance;
      }

      public Section getDefaultInstanceForType() {
        return defaultInstance;
      }

      private final com.google.protobuf.UnknownFieldSet unknownFields;
      @java.lang.Override
      public final com.google.protobuf.UnknownFieldSet
          getUnknownFields() {
        return this.unknownFields;
      }
      private Section(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        initFields();
        int mutable_bitField0_ = 0;
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
            com.google.protobuf.UnknownFieldSet.newBuilder();
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              default: {
                if (!parseUnknownField(input, unknownFields,
                                       extensionRegistry, tag)) {
                  done = true;
                }
                break;
              }
              case 10: {
                bitField0_ |= 0x00000001;
                name_ = input.readBytes();
                break;
              }
              case 16: {
                bitField0_ |= 0x00000002;
                length_ = input.readUInt64();
                break;
              }
              case 24: {
                bitField0_ |= 0x00000004;
                offset_ = input.readUInt64();
                break;
              }
            }
          }
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(this);
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(
              e.getMessage()).setUnfinishedMessage(this);
        } finally {
          this.unknownFields = unknownFields.build();
          makeExtensionsImmutable();
        }
      }
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_FileSummary_Section_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_FileSummary_Section_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section.Builder.class);
      }

      public static com.google.protobuf.Parser<Section> PARSER =
          new com.google.protobuf.AbstractParser<Section>() {
        public Section parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          return new Section(input, extensionRegistry);
        }
      };

      @java.lang.Override
      public com.google.protobuf.Parser<Section> getParserForType() {
        return PARSER;
      }

      private int bitField0_;
      // optional string name = 1;
      public static final int NAME_FIELD_NUMBER = 1;
      private java.lang.Object name_;
      /**
       * <code>optional string name = 1;</code>
       */
      public boolean hasName() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>optional string name = 1;</code>
       */
      public java.lang.String getName() {
        java.lang.Object ref = name_;
        if (ref instanceof java.lang.String) {
          return (java.lang.String) ref;
        } else {
          com.google.protobuf.ByteString bs =
              (com.google.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            name_ = s;
          }
          return s;
        }
      }
      /**
       * <code>optional string name = 1;</code>
       */
      public com.google.protobuf.ByteString
          getNameBytes() {
        java.lang.Object ref = name_;
        if (ref instanceof java.lang.String) {
          com.google.protobuf.ByteString b =
              com.google.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          name_ = b;
          return b;
        } else {
          return (com.google.protobuf.ByteString) ref;
        }
      }

      // optional uint64 length = 2;
      public static final int LENGTH_FIELD_NUMBER = 2;
      private long length_;
      /**
       * <code>optional uint64 length = 2;</code>
       */
      public boolean hasLength() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>optional uint64 length = 2;</code>
       */
      public long getLength() {
        return length_;
      }

      // optional uint64 offset = 3;
      public static final int OFFSET_FIELD_NUMBER = 3;
      private long offset_;
      /**
       * <code>optional uint64 offset = 3;</code>
       */
      public boolean hasOffset() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      /**
       * <code>optional uint64 offset = 3;</code>
       */
      public long getOffset() {
        return offset_;
      }

      private void initFields() {
        name_ = "";
        length_ = 0L;
        offset_ = 0L;
      }
      private byte memoizedIsInitialized = -1;
      public final boolean isInitialized() {
        byte isInitialized = memoizedIsInitialized;
        if (isInitialized != -1) return isInitialized == 1;

        memoizedIsInitialized = 1;
        return true;
      }

      public void writeTo(com.google.protobuf.CodedOutputStream output)
                          throws java.io.IOException {
        getSerializedSize();
        if (((bitField0_ & 0x00000001) == 0x00000001)) {
          output.writeBytes(1, getNameBytes());
        }
        if (((bitField0_ & 0x00000002) == 0x00000002)) {
          output.writeUInt64(2, length_);
        }
        if (((bitField0_ & 0x00000004) == 0x00000004)) {
          output.writeUInt64(3, offset_);
        }
        getUnknownFields().writeTo(output);
      }

      private int memoizedSerializedSize = -1;
      public int getSerializedSize() {
        int size = memoizedSerializedSize;
        if (size != -1) return size;

        size = 0;
        if (((bitField0_ & 0x00000001) == 0x00000001)) {
          size += com.google.protobuf.CodedOutputStream
            .computeBytesSize(1, getNameBytes());
        }
        if (((bitField0_ & 0x00000002) == 0x00000002)) {
          size += com.google.protobuf.CodedOutputStream
            .computeUInt64Size(2, length_);
        }
        if (((bitField0_ & 0x00000004) == 0x00000004)) {
          size += com.google.protobuf.CodedOutputStream
            .computeUInt64Size(3, offset_);
        }
        size += getUnknownFields().getSerializedSize();
        memoizedSerializedSize = size;
        return size;
      }

      private static final long serialVersionUID = 0L;
      @java.lang.Override
      protected java.lang.Object writeReplace()
          throws java.io.ObjectStreamException {
        return super.writeReplace();
      }

      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section parseFrom(
          com.google.protobuf.ByteString data)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section parseFrom(
          com.google.protobuf.ByteString data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section parseFrom(byte[] data)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section parseFrom(
          byte[] data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section parseFrom(java.io.InputStream input)
          throws java.io.IOException {
        return PARSER.parseFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section parseFrom(
          java.io.InputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseFrom(input, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section parseDelimitedFrom(java.io.InputStream input)
          throws java.io.IOException {
        return PARSER.parseDelimitedFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section parseDelimitedFrom(
          java.io.InputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseDelimitedFrom(input, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section parseFrom(
          com.google.protobuf.CodedInputStream input)
          throws java.io.IOException {
        return PARSER.parseFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section parseFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseFrom(input, extensionRegistry);
      }

      public static Builder newBuilder() { return Builder.create(); }
      public Builder newBuilderForType() { return newBuilder(); }
      public static Builder newBuilder(org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section prototype) {
        return newBuilder().mergeFrom(prototype);
      }
      public Builder toBuilder() { return newBuilder(this); }

      @java.lang.Override
      protected Builder newBuilderForType(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        Builder builder = new Builder(parent);
        return builder;
      }
      /**
       * Protobuf type {@code hadoop.hdfs.fsimage.FileSummary.Section}
       *
       * <pre>
       * index for each section
       * </pre>
       */
      public static final class Builder extends
          com.google.protobuf.GeneratedMessage.Builder<Builder>
         implements org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.SectionOrBuilder {
        public static final com.google.protobuf.Descriptors.Descriptor
            getDescriptor() {
          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_FileSummary_Section_descriptor;
        }

        protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
            internalGetFieldAccessorTable() {
          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_FileSummary_Section_fieldAccessorTable
              .ensureFieldAccessorsInitialized(
                  org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section.Builder.class);
        }

        // Construct using org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section.newBuilder()
        private Builder() {
          maybeForceBuilderInitialization();
        }

        private Builder(
            com.google.protobuf.GeneratedMessage.BuilderParent parent) {
          super(parent);
          maybeForceBuilderInitialization();
        }
        private void maybeForceBuilderInitialization() {
          if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          }
        }
        private static Builder create() {
          return new Builder();
        }

        public Builder clear() {
          super.clear();
          name_ = "";
          bitField0_ = (bitField0_ & ~0x00000001);
          length_ = 0L;
          bitField0_ = (bitField0_ & ~0x00000002);
          offset_ = 0L;
          bitField0_ = (bitField0_ & ~0x00000004);
          return this;
        }

        public Builder clone() {
          return create().mergeFrom(buildPartial());
        }

        public com.google.protobuf.Descriptors.Descriptor
            getDescriptorForType() {
          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_FileSummary_Section_descriptor;
        }

        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section getDefaultInstanceForType() {
          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section.getDefaultInstance();
        }

        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section build() {
          org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section result = buildPartial();
          if (!result.isInitialized()) {
            throw newUninitializedMessageException(result);
          }
          return result;
        }

        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section buildPartial() {
          org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section result = new org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section(this);
          int from_bitField0_ = bitField0_;
          int to_bitField0_ = 0;
          if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
            to_bitField0_ |= 0x00000001;
          }
          result.name_ = name_;
          if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
            to_bitField0_ |= 0x00000002;
          }
          result.length_ = length_;
          if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
            to_bitField0_ |= 0x00000004;
          }
          result.offset_ = offset_;
          result.bitField0_ = to_bitField0_;
          onBuilt();
          return result;
        }

        public Builder mergeFrom(com.google.protobuf.Message other) {
          if (other instanceof org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section) {
            return mergeFrom((org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section)other);
          } else {
            super.mergeFrom(other);
            return this;
          }
        }

        public Builder mergeFrom(org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section other) {
          if (other == org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section.getDefaultInstance()) return this;
          if (other.hasName()) {
            bitField0_ |= 0x00000001;
            name_ = other.name_;
            onChanged();
          }
          if (other.hasLength()) {
            setLength(other.getLength());
          }
          if (other.hasOffset()) {
            setOffset(other.getOffset());
          }
          this.mergeUnknownFields(other.getUnknownFields());
          return this;
        }

        public final boolean isInitialized() {
          return true;
        }

        public Builder mergeFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws java.io.IOException {
          org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section parsedMessage = null;
          try {
            parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            parsedMessage = (org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section) e.getUnfinishedMessage();
            throw e;
          } finally {
            if (parsedMessage != null) {
              mergeFrom(parsedMessage);
            }
          }
          return this;
        }
        private int bitField0_;

        // optional string name = 1;
        private java.lang.Object name_ = "";
        /**
         * <code>optional string name = 1;</code>
         */
        public boolean hasName() {
          return ((bitField0_ & 0x00000001) == 0x00000001);
        }
        /**
         * <code>optional string name = 1;</code>
         */
        public java.lang.String getName() {
          java.lang.Object ref = name_;
          if (!(ref instanceof java.lang.String)) {
            java.lang.String s = ((com.google.protobuf.ByteString) ref)
                .toStringUtf8();
            name_ = s;
            return s;
          } else {
            return (java.lang.String) ref;
          }
        }
        /**
         * <code>optional string name = 1;</code>
         */
        public com.google.protobuf.ByteString
            getNameBytes() {
          java.lang.Object ref = name_;
          if (ref instanceof String) {
            com.google.protobuf.ByteString b =
                com.google.protobuf.ByteString.copyFromUtf8(
                    (java.lang.String) ref);
            name_ = b;
            return b;
          } else {
            return (com.google.protobuf.ByteString) ref;
          }
        }
        /**
         * <code>optional string name = 1;</code>
         */
        public Builder setName(
            java.lang.String value) {
          if (value == null) {
            throw new NullPointerException();
          }
          bitField0_ |= 0x00000001;
          name_ = value;
          onChanged();
          return this;
        }
        /**
         * <code>optional string name = 1;</code>
         */
        public Builder clearName() {
          bitField0_ = (bitField0_ & ~0x00000001);
          name_ = getDefaultInstance().getName();
          onChanged();
          return this;
        }
        /**
         * <code>optional string name = 1;</code>
         */
        public Builder setNameBytes(
            com.google.protobuf.ByteString value) {
          if (value == null) {
            throw new NullPointerException();
          }
          bitField0_ |= 0x00000001;
          name_ = value;
          onChanged();
          return this;
        }

        // optional uint64 length = 2;
        private long length_;
        /**
         * <code>optional uint64 length = 2;</code>
         */
        public boolean hasLength() {
          return ((bitField0_ & 0x00000002) == 0x00000002);
        }
        /**
         * <code>optional uint64 length = 2;</code>
         */
        public long getLength() {
          return length_;
        }
        /**
         * <code>optional uint64 length = 2;</code>
         */
        public Builder setLength(long value) {
          bitField0_ |= 0x00000002;
          length_ = value;
          onChanged();
          return this;
        }
        /**
         * <code>optional uint64 length = 2;</code>
         */
        public Builder clearLength() {
          bitField0_ = (bitField0_ & ~0x00000002);
          length_ = 0L;
          onChanged();
          return this;
        }

        // optional uint64 offset = 3;
        private long offset_;
        /**
         * <code>optional uint64 offset = 3;</code>
         */
        public boolean hasOffset() {
          return ((bitField0_ & 0x00000004) == 0x00000004);
        }
        /**
         * <code>optional uint64 offset = 3;</code>
         */
        public long getOffset() {
          return offset_;
        }
        /**
         * <code>optional uint64 offset = 3;</code>
         */
        public Builder setOffset(long value) {
          bitField0_ |= 0x00000004;
          offset_ = value;
          onChanged();
          return this;
        }
        /**
         * <code>optional uint64 offset = 3;</code>
         */
        public Builder clearOffset() {
          bitField0_ = (bitField0_ & ~0x00000004);
          offset_ = 0L;
          onChanged();
          return this;
        }

        // @@protoc_insertion_point(builder_scope:hadoop.hdfs.fsimage.FileSummary.Section)
      }

      static {
        defaultInstance = new Section(true);
        defaultInstance.initFields();
      }

      // @@protoc_insertion_point(class_scope:hadoop.hdfs.fsimage.FileSummary.Section)
    }
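    // Illustrative usage (hand-written sketch, not generated code; the literal
    // section name below is only an example, real names come from the writer):
    //
    //   FileSummary.Section section = FileSummary.Section.newBuilder()
    //       .setName("EXAMPLE_SECTION")
    //       .setOffset(0L)
    //       .setLength(128L)
    //       .build();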

    private int bitField0_;
    // required uint32 ondiskVersion = 1;
    public static final int ONDISKVERSION_FIELD_NUMBER = 1;
    private int ondiskVersion_;
    /**
     * <code>required uint32 ondiskVersion = 1;</code>
     *
     * <pre>
     * The version of the above EBNF grammars.
     * </pre>
     */
    public boolean hasOndiskVersion() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required uint32 ondiskVersion = 1;</code>
     *
     * <pre>
     * The version of the above EBNF grammars.
     * </pre>
     */
    public int getOndiskVersion() {
      return ondiskVersion_;
    }

    // required uint32 layoutVersion = 2;
    public static final int LAYOUTVERSION_FIELD_NUMBER = 2;
    private int layoutVersion_;
    /**
     * <code>required uint32 layoutVersion = 2;</code>
     *
     * <pre>
     * layoutVersion describes which features are available in the
     * FSImage.
     * </pre>
     */
    public boolean hasLayoutVersion() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>required uint32 layoutVersion = 2;</code>
     *
     * <pre>
     * layoutVersion describes which features are available in the
     * FSImage.
     * </pre>
     */
    public int getLayoutVersion() {
      return layoutVersion_;
    }

    // optional string codec = 3;
    public static final int CODEC_FIELD_NUMBER = 3;
    private java.lang.Object codec_;
    /**
     * <code>optional string codec = 3;</code>
     */
    public boolean hasCodec() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    /**
     * <code>optional string codec = 3;</code>
     */
    public java.lang.String getCodec() {
      java.lang.Object ref = codec_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          codec_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string codec = 3;</code>
     */
    public com.google.protobuf.ByteString
        getCodecBytes() {
      java.lang.Object ref = codec_;
      if (ref instanceof java.lang.String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        codec_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    // repeated .hadoop.hdfs.fsimage.FileSummary.Section sections = 4;
    public static final int SECTIONS_FIELD_NUMBER = 4;
    private java.util.List<org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section> sections_;
    /**
     * <code>repeated .hadoop.hdfs.fsimage.FileSummary.Section sections = 4;</code>
     */
    public java.util.List<org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section> getSectionsList() {
      return sections_;
    }
    /**
     * <code>repeated .hadoop.hdfs.fsimage.FileSummary.Section sections = 4;</code>
     */
    public java.util.List<? extends org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.SectionOrBuilder>
        getSectionsOrBuilderList() {
      return sections_;
    }
    /**
     * <code>repeated .hadoop.hdfs.fsimage.FileSummary.Section sections = 4;</code>
     */
    public int getSectionsCount() {
      return sections_.size();
    }
    /**
     * <code>repeated .hadoop.hdfs.fsimage.FileSummary.Section sections = 4;</code>
     */
    public org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section getSections(int index) {
      return sections_.get(index);
    }
    /**
     * <code>repeated .hadoop.hdfs.fsimage.FileSummary.Section sections = 4;</code>
     */
    public org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.SectionOrBuilder getSectionsOrBuilder(
        int index) {
      return sections_.get(index);
    }

    private void initFields() {
      ondiskVersion_ = 0;
      layoutVersion_ = 0;
      codec_ = "";
      sections_ = java.util.Collections.emptyList();
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      if (!hasOndiskVersion()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasLayoutVersion()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }
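    // Note: ondiskVersion and layoutVersion are the only required fields of
    // FileSummary, so isInitialized() fails when either is unset and
    // Builder.build() then throws via newUninitializedMessageException.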

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeUInt32(1, ondiskVersion_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeUInt32(2, layoutVersion_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeBytes(3, getCodecBytes());
      }
      for (int i = 0; i < sections_.size(); i++) {
        output.writeMessage(4, sections_.get(i));
      }
      getUnknownFields().writeTo(output);
    }

    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt32Size(1, ondiskVersion_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt32Size(2, layoutVersion_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(3, getCodecBytes());
      }
      for (int i = 0; i < sections_.size(); i++) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(4, sections_.get(i));
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
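    // Illustrative usage (hand-written sketch; the stream source is an
    // assumption, any InputStream positioned at a serialized FileSummary works):
    //
    //   try (java.io.InputStream in = java.nio.file.Files.newInputStream(path)) {
    //     FsImageProto.FileSummary summary = FsImageProto.FileSummary.parseFrom(in);
    //     if (summary.hasCodec()) {
    //       // section payloads were written through this compression codec
    //     }
    //   }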
1106
1107    public static Builder newBuilder() { return Builder.create(); }
1108    public Builder newBuilderForType() { return newBuilder(); }
1109    public static Builder newBuilder(org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary prototype) {
1110      return newBuilder().mergeFrom(prototype);
1111    }
1112    public Builder toBuilder() { return newBuilder(this); }
1113
1114    @java.lang.Override
1115    protected Builder newBuilderForType(
1116        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
1117      Builder builder = new Builder(parent);
1118      return builder;
1119    }
1120    /**
1121     * Protobuf type {@code hadoop.hdfs.fsimage.FileSummary}
1122     */
1123    public static final class Builder extends
1124        com.google.protobuf.GeneratedMessage.Builder<Builder>
1125       implements org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummaryOrBuilder {
1126      public static final com.google.protobuf.Descriptors.Descriptor
1127          getDescriptor() {
1128        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_FileSummary_descriptor;
1129      }
1130
1131      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
1132          internalGetFieldAccessorTable() {
1133        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_FileSummary_fieldAccessorTable
1134            .ensureFieldAccessorsInitialized(
1135                org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Builder.class);
1136      }
1137
1138      // Construct using org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.newBuilder()
1139      private Builder() {
1140        maybeForceBuilderInitialization();
1141      }
1142
1143      private Builder(
1144          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
1145        super(parent);
1146        maybeForceBuilderInitialization();
1147      }
1148      private void maybeForceBuilderInitialization() {
1149        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
1150          getSectionsFieldBuilder();
1151        }
1152      }
1153      private static Builder create() {
1154        return new Builder();
1155      }
1156
1157      public Builder clear() {
1158        super.clear();
1159        ondiskVersion_ = 0;
1160        bitField0_ = (bitField0_ & ~0x00000001);
1161        layoutVersion_ = 0;
1162        bitField0_ = (bitField0_ & ~0x00000002);
1163        codec_ = "";
1164        bitField0_ = (bitField0_ & ~0x00000004);
1165        if (sectionsBuilder_ == null) {
1166          sections_ = java.util.Collections.emptyList();
1167          bitField0_ = (bitField0_ & ~0x00000008);
1168        } else {
1169          sectionsBuilder_.clear();
1170        }
1171        return this;
1172      }
1173
1174      public Builder clone() {
1175        return create().mergeFrom(buildPartial());
1176      }
1177
1178      public com.google.protobuf.Descriptors.Descriptor
1179          getDescriptorForType() {
1180        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_FileSummary_descriptor;
1181      }
1182
1183      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary getDefaultInstanceForType() {
1184        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.getDefaultInstance();
1185      }
1186
1187      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary build() {
1188        org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary result = buildPartial();
1189        if (!result.isInitialized()) {
1190          throw newUninitializedMessageException(result);
1191        }
1192        return result;
1193      }
1194
1195      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary buildPartial() {
1196        org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary result = new org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary(this);
1197        int from_bitField0_ = bitField0_;
1198        int to_bitField0_ = 0;
1199        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
1200          to_bitField0_ |= 0x00000001;
1201        }
1202        result.ondiskVersion_ = ondiskVersion_;
1203        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
1204          to_bitField0_ |= 0x00000002;
1205        }
1206        result.layoutVersion_ = layoutVersion_;
1207        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
1208          to_bitField0_ |= 0x00000004;
1209        }
1210        result.codec_ = codec_;
1211        if (sectionsBuilder_ == null) {
1212          if (((bitField0_ & 0x00000008) == 0x00000008)) {
1213            sections_ = java.util.Collections.unmodifiableList(sections_);
1214            bitField0_ = (bitField0_ & ~0x00000008);
1215          }
1216          result.sections_ = sections_;
1217        } else {
1218          result.sections_ = sectionsBuilder_.build();
1219        }
1220        result.bitField0_ = to_bitField0_;
1221        onBuilt();
1222        return result;
1223      }
1224
1225      public Builder mergeFrom(com.google.protobuf.Message other) {
1226        if (other instanceof org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary) {
1227          return mergeFrom((org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary)other);
1228        } else {
1229          super.mergeFrom(other);
1230          return this;
1231        }
1232      }
1233
1234      public Builder mergeFrom(org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary other) {
1235        if (other == org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.getDefaultInstance()) return this;
1236        if (other.hasOndiskVersion()) {
1237          setOndiskVersion(other.getOndiskVersion());
1238        }
1239        if (other.hasLayoutVersion()) {
1240          setLayoutVersion(other.getLayoutVersion());
1241        }
1242        if (other.hasCodec()) {
1243          bitField0_ |= 0x00000004;
1244          codec_ = other.codec_;
1245          onChanged();
1246        }
1247        if (sectionsBuilder_ == null) {
1248          if (!other.sections_.isEmpty()) {
1249            if (sections_.isEmpty()) {
1250              sections_ = other.sections_;
1251              bitField0_ = (bitField0_ & ~0x00000008);
1252            } else {
1253              ensureSectionsIsMutable();
1254              sections_.addAll(other.sections_);
1255            }
1256            onChanged();
1257          }
1258        } else {
1259          if (!other.sections_.isEmpty()) {
1260            if (sectionsBuilder_.isEmpty()) {
1261              sectionsBuilder_.dispose();
1262              sectionsBuilder_ = null;
1263              sections_ = other.sections_;
1264              bitField0_ = (bitField0_ & ~0x00000008);
1265              sectionsBuilder_ = 
1266                com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
1267                   getSectionsFieldBuilder() : null;
1268            } else {
1269              sectionsBuilder_.addAllMessages(other.sections_);
1270            }
1271          }
1272        }
1273        this.mergeUnknownFields(other.getUnknownFields());
1274        return this;
1275      }
1276
1277      public final boolean isInitialized() {
1278        if (!hasOndiskVersion()) {
1279          
1280          return false;
1281        }
1282        if (!hasLayoutVersion()) {
1283          
1284          return false;
1285        }
1286        return true;
1287      }
1288
1289      public Builder mergeFrom(
1290          com.google.protobuf.CodedInputStream input,
1291          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1292          throws java.io.IOException {
1293        org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary parsedMessage = null;
1294        try {
1295          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
1296        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
1297          parsedMessage = (org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary) e.getUnfinishedMessage();
1298          throw e;
1299        } finally {
1300          if (parsedMessage != null) {
1301            mergeFrom(parsedMessage);
1302          }
1303        }
1304        return this;
1305      }
1306      private int bitField0_;
1307
1308      // required uint32 ondiskVersion = 1;
1309      private int ondiskVersion_ ;
1310      /**
1311       * <code>required uint32 ondiskVersion = 1;</code>
1312       *
1313       * <pre>
1314       * The version of the above EBNF grammars.
1315       * </pre>
1316       */
1317      public boolean hasOndiskVersion() {
1318        return ((bitField0_ & 0x00000001) == 0x00000001);
1319      }
1320      /**
1321       * <code>required uint32 ondiskVersion = 1;</code>
1322       *
1323       * <pre>
1324       * The version of the above EBNF grammars.
1325       * </pre>
1326       */
1327      public int getOndiskVersion() {
1328        return ondiskVersion_;
1329      }
1330      /**
1331       * <code>required uint32 ondiskVersion = 1;</code>
1332       *
1333       * <pre>
1334       * The version of the above EBNF grammars.
1335       * </pre>
1336       */
1337      public Builder setOndiskVersion(int value) {
1338        bitField0_ |= 0x00000001;
1339        ondiskVersion_ = value;
1340        onChanged();
1341        return this;
1342      }
1343      /**
1344       * <code>required uint32 ondiskVersion = 1;</code>
1345       *
1346       * <pre>
1347       * The version of the above EBNF grammars.
1348       * </pre>
1349       */
1350      public Builder clearOndiskVersion() {
1351        bitField0_ = (bitField0_ & ~0x00000001);
1352        ondiskVersion_ = 0;
1353        onChanged();
1354        return this;
1355      }
1356
1357      // required uint32 layoutVersion = 2;
1358      private int layoutVersion_ ;
1359      /**
1360       * <code>required uint32 layoutVersion = 2;</code>
1361       *
1362       * <pre>
1363       * layoutVersion describes which features are available in the
1364       * FSImage.
1365       * </pre>
1366       */
1367      public boolean hasLayoutVersion() {
1368        return ((bitField0_ & 0x00000002) == 0x00000002);
1369      }
1370      /**
1371       * <code>required uint32 layoutVersion = 2;</code>
1372       *
1373       * <pre>
1374       * layoutVersion describes which features are available in the
1375       * FSImage.
1376       * </pre>
1377       */
1378      public int getLayoutVersion() {
1379        return layoutVersion_;
1380      }
1381      /**
1382       * <code>required uint32 layoutVersion = 2;</code>
1383       *
1384       * <pre>
1385       * layoutVersion describes which features are available in the
1386       * FSImage.
1387       * </pre>
1388       */
1389      public Builder setLayoutVersion(int value) {
1390        bitField0_ |= 0x00000002;
1391        layoutVersion_ = value;
1392        onChanged();
1393        return this;
1394      }
1395      /**
1396       * <code>required uint32 layoutVersion = 2;</code>
1397       *
1398       * <pre>
1399       * layoutVersion describes which features are available in the
1400       * FSImage.
1401       * </pre>
1402       */
1403      public Builder clearLayoutVersion() {
1404        bitField0_ = (bitField0_ & ~0x00000002);
1405        layoutVersion_ = 0;
1406        onChanged();
1407        return this;
1408      }
1409
1410      // optional string codec = 3;
1411      private java.lang.Object codec_ = "";
1412      /**
1413       * <code>optional string codec = 3;</code>
1414       */
1415      public boolean hasCodec() {
1416        return ((bitField0_ & 0x00000004) == 0x00000004);
1417      }
1418      /**
1419       * <code>optional string codec = 3;</code>
1420       */
1421      public java.lang.String getCodec() {
1422        java.lang.Object ref = codec_;
1423        if (!(ref instanceof java.lang.String)) {
1424          java.lang.String s = ((com.google.protobuf.ByteString) ref)
1425              .toStringUtf8();
1426          codec_ = s;
1427          return s;
1428        } else {
1429          return (java.lang.String) ref;
1430        }
1431      }
1432      /**
1433       * <code>optional string codec = 3;</code>
1434       */
1435      public com.google.protobuf.ByteString
1436          getCodecBytes() {
1437        java.lang.Object ref = codec_;
1438        if (ref instanceof String) {
1439          com.google.protobuf.ByteString b = 
1440              com.google.protobuf.ByteString.copyFromUtf8(
1441                  (java.lang.String) ref);
1442          codec_ = b;
1443          return b;
1444        } else {
1445          return (com.google.protobuf.ByteString) ref;
1446        }
1447      }
1448      /**
1449       * <code>optional string codec = 3;</code>
1450       */
1451      public Builder setCodec(
1452          java.lang.String value) {
1453        if (value == null) {
1454    throw new NullPointerException();
1455  }
1456  bitField0_ |= 0x00000004;
1457        codec_ = value;
1458        onChanged();
1459        return this;
1460      }
1461      /**
1462       * <code>optional string codec = 3;</code>
1463       */
1464      public Builder clearCodec() {
1465        bitField0_ = (bitField0_ & ~0x00000004);
1466        codec_ = getDefaultInstance().getCodec();
1467        onChanged();
1468        return this;
1469      }
1470      /**
1471       * <code>optional string codec = 3;</code>
1472       */
1473      public Builder setCodecBytes(
1474          com.google.protobuf.ByteString value) {
1475        if (value == null) {
1476    throw new NullPointerException();
1477  }
1478  bitField0_ |= 0x00000004;
1479        codec_ = value;
1480        onChanged();
1481        return this;
1482      }

      // repeated .hadoop.hdfs.fsimage.FileSummary.Section sections = 4;
      private java.util.List<org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section> sections_ =
        java.util.Collections.emptyList();
      private void ensureSectionsIsMutable() {
        if (!((bitField0_ & 0x00000008) == 0x00000008)) {
          sections_ = new java.util.ArrayList<org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section>(sections_);
          bitField0_ |= 0x00000008;
        }
      }

      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section, org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.SectionOrBuilder> sectionsBuilder_;

      /**
       * <code>repeated .hadoop.hdfs.fsimage.FileSummary.Section sections = 4;</code>
       */
      public java.util.List<org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section> getSectionsList() {
        if (sectionsBuilder_ == null) {
          return java.util.Collections.unmodifiableList(sections_);
        } else {
          return sectionsBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .hadoop.hdfs.fsimage.FileSummary.Section sections = 4;</code>
       */
      public int getSectionsCount() {
        if (sectionsBuilder_ == null) {
          return sections_.size();
        } else {
          return sectionsBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .hadoop.hdfs.fsimage.FileSummary.Section sections = 4;</code>
       */
      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section getSections(int index) {
        if (sectionsBuilder_ == null) {
          return sections_.get(index);
        } else {
          return sectionsBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .hadoop.hdfs.fsimage.FileSummary.Section sections = 4;</code>
       */
      public Builder setSections(
          int index, org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section value) {
        if (sectionsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureSectionsIsMutable();
          sections_.set(index, value);
          onChanged();
        } else {
          sectionsBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.hdfs.fsimage.FileSummary.Section sections = 4;</code>
       */
      public Builder setSections(
          int index, org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section.Builder builderForValue) {
        if (sectionsBuilder_ == null) {
          ensureSectionsIsMutable();
          sections_.set(index, builderForValue.build());
          onChanged();
        } else {
          sectionsBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.hdfs.fsimage.FileSummary.Section sections = 4;</code>
       */
      public Builder addSections(org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section value) {
        if (sectionsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureSectionsIsMutable();
          sections_.add(value);
          onChanged();
        } else {
          sectionsBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.hdfs.fsimage.FileSummary.Section sections = 4;</code>
       */
      public Builder addSections(
          int index, org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section value) {
        if (sectionsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureSectionsIsMutable();
          sections_.add(index, value);
          onChanged();
        } else {
          sectionsBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.hdfs.fsimage.FileSummary.Section sections = 4;</code>
       */
      public Builder addSections(
          org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section.Builder builderForValue) {
        if (sectionsBuilder_ == null) {
          ensureSectionsIsMutable();
          sections_.add(builderForValue.build());
          onChanged();
        } else {
          sectionsBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.hdfs.fsimage.FileSummary.Section sections = 4;</code>
       */
      public Builder addSections(
          int index, org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section.Builder builderForValue) {
        if (sectionsBuilder_ == null) {
          ensureSectionsIsMutable();
          sections_.add(index, builderForValue.build());
          onChanged();
        } else {
          sectionsBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.hdfs.fsimage.FileSummary.Section sections = 4;</code>
       */
      public Builder addAllSections(
          java.lang.Iterable<? extends org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section> values) {
        if (sectionsBuilder_ == null) {
          ensureSectionsIsMutable();
          super.addAll(values, sections_);
          onChanged();
        } else {
          sectionsBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.hdfs.fsimage.FileSummary.Section sections = 4;</code>
       */
      public Builder clearSections() {
        if (sectionsBuilder_ == null) {
          sections_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000008);
          onChanged();
        } else {
          sectionsBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.hdfs.fsimage.FileSummary.Section sections = 4;</code>
       */
      public Builder removeSections(int index) {
        if (sectionsBuilder_ == null) {
          ensureSectionsIsMutable();
          sections_.remove(index);
          onChanged();
        } else {
          sectionsBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.hdfs.fsimage.FileSummary.Section sections = 4;</code>
       */
      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section.Builder getSectionsBuilder(
          int index) {
        return getSectionsFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .hadoop.hdfs.fsimage.FileSummary.Section sections = 4;</code>
       */
      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.SectionOrBuilder getSectionsOrBuilder(
          int index) {
        if (sectionsBuilder_ == null) {
          return sections_.get(index);
        } else {
          return sectionsBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .hadoop.hdfs.fsimage.FileSummary.Section sections = 4;</code>
       */
      public java.util.List<? extends org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.SectionOrBuilder>
           getSectionsOrBuilderList() {
        if (sectionsBuilder_ != null) {
          return sectionsBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(sections_);
        }
      }
      /**
       * <code>repeated .hadoop.hdfs.fsimage.FileSummary.Section sections = 4;</code>
       */
      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section.Builder addSectionsBuilder() {
        return getSectionsFieldBuilder().addBuilder(
            org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.hdfs.fsimage.FileSummary.Section sections = 4;</code>
       */
      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section.Builder addSectionsBuilder(
          int index) {
        return getSectionsFieldBuilder().addBuilder(
            index, org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.hdfs.fsimage.FileSummary.Section sections = 4;</code>
       */
      public java.util.List<org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section.Builder>
           getSectionsBuilderList() {
        return getSectionsFieldBuilder().getBuilderList();
      }
      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section, org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.SectionOrBuilder>
          getSectionsFieldBuilder() {
        if (sectionsBuilder_ == null) {
          sectionsBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
              org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section, org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.Section.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.SectionOrBuilder>(
                  sections_,
                  ((bitField0_ & 0x00000008) == 0x00000008),
                  getParentForChildren(),
                  isClean());
          sections_ = null;
        }
        return sectionsBuilder_;
      }
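      // Repeated-field storage has two modes: until a nested builder is requested,
      // sections_ is a plain java.util.List whose mutability is guarded by bit
      // 0x00000008; once getSectionsFieldBuilder() runs, the RepeatedFieldBuilder
      // takes ownership and sections_ is nulled, which is why every accessor above
      // branches on sectionsBuilder_ == null. Illustrative sketch, where
      // someSection stands in for any built Section instance:
      //
      //   FileSummary.Builder b = FileSummary.newBuilder();
      //   b.addSections(someSection);   // list mode: appended to sections_
      //   b.getSectionsBuilder(0);      // switches to RepeatedFieldBuilder mode
      //   b.getSectionsCount();         // now delegates to sectionsBuilder_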

      // @@protoc_insertion_point(builder_scope:hadoop.hdfs.fsimage.FileSummary)
    }

    static {
      defaultInstance = new FileSummary(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:hadoop.hdfs.fsimage.FileSummary)
  }
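  // A parsed FileSummary acts as the image's table of contents: each Section
  // entry names one region of the file. A minimal sketch of walking it, assuming
  // a FileSummary instance named summary; Section's name/offset/length accessors
  // follow from the fsimage.proto schema rather than from the code shown here:
  //
  //   for (FileSummary.Section s : summary.getSectionsList()) {
  //     System.out.println(s.getName() + " @" + s.getOffset() + " len=" + s.getLength());
  //   }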

  public interface NameSystemSectionOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // optional uint32 namespaceId = 1;
    /**
     * <code>optional uint32 namespaceId = 1;</code>
     */
    boolean hasNamespaceId();
    /**
     * <code>optional uint32 namespaceId = 1;</code>
     */
    int getNamespaceId();

    // optional uint64 genstampV1 = 2;
    /**
     * <code>optional uint64 genstampV1 = 2;</code>
     */
    boolean hasGenstampV1();
    /**
     * <code>optional uint64 genstampV1 = 2;</code>
     */
    long getGenstampV1();

    // optional uint64 genstampV2 = 3;
    /**
     * <code>optional uint64 genstampV2 = 3;</code>
     */
    boolean hasGenstampV2();
    /**
     * <code>optional uint64 genstampV2 = 3;</code>
     */
    long getGenstampV2();

    // optional uint64 genstampV1Limit = 4;
    /**
     * <code>optional uint64 genstampV1Limit = 4;</code>
     */
    boolean hasGenstampV1Limit();
    /**
     * <code>optional uint64 genstampV1Limit = 4;</code>
     */
    long getGenstampV1Limit();

    // optional uint64 lastAllocatedBlockId = 5;
    /**
     * <code>optional uint64 lastAllocatedBlockId = 5;</code>
     */
    boolean hasLastAllocatedBlockId();
    /**
     * <code>optional uint64 lastAllocatedBlockId = 5;</code>
     */
    long getLastAllocatedBlockId();

    // optional uint64 transactionId = 6;
    /**
     * <code>optional uint64 transactionId = 6;</code>
     */
    boolean hasTransactionId();
    /**
     * <code>optional uint64 transactionId = 6;</code>
     */
    long getTransactionId();

    // optional uint64 rollingUpgradeStartTime = 7;
    /**
     * <code>optional uint64 rollingUpgradeStartTime = 7;</code>
     */
    boolean hasRollingUpgradeStartTime();
    /**
     * <code>optional uint64 rollingUpgradeStartTime = 7;</code>
     */
    long getRollingUpgradeStartTime();
  }
  /**
   * Protobuf type {@code hadoop.hdfs.fsimage.NameSystemSection}
   *
   * <pre>
   * Name: NS_INFO
   * </pre>
   */
  public static final class NameSystemSection extends
      com.google.protobuf.GeneratedMessage
      implements NameSystemSectionOrBuilder {
    // Use NameSystemSection.newBuilder() to construct.
    private NameSystemSection(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    private NameSystemSection(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final NameSystemSection defaultInstance;
    public static NameSystemSection getDefaultInstance() {
      return defaultInstance;
    }

    public NameSystemSection getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    private NameSystemSection(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 8: {
              bitField0_ |= 0x00000001;
              namespaceId_ = input.readUInt32();
              break;
            }
            case 16: {
              bitField0_ |= 0x00000002;
              genstampV1_ = input.readUInt64();
              break;
            }
            case 24: {
              bitField0_ |= 0x00000004;
              genstampV2_ = input.readUInt64();
              break;
            }
            case 32: {
              bitField0_ |= 0x00000008;
              genstampV1Limit_ = input.readUInt64();
              break;
            }
            case 40: {
              bitField0_ |= 0x00000010;
              lastAllocatedBlockId_ = input.readUInt64();
              break;
            }
            case 48: {
              bitField0_ |= 0x00000020;
              transactionId_ = input.readUInt64();
              break;
            }
            case 56: {
              bitField0_ |= 0x00000040;
              rollingUpgradeStartTime_ = input.readUInt64();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
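    // The case labels above are protobuf wire tags: tag = (fieldNumber << 3) | wireType.
    // All seven fields here are varints (wireType 0), so field 1 parses under
    // case 8 (1 << 3 | 0), field 2 under case 16, and so on up to field 7 under
    // case 56; case 0 marks end of input and anything else falls to the
    // unknown-field handler.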
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_NameSystemSection_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_NameSystemSection_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hdfs.server.namenode.FsImageProto.NameSystemSection.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.NameSystemSection.Builder.class);
    }

    public static com.google.protobuf.Parser<NameSystemSection> PARSER =
        new com.google.protobuf.AbstractParser<NameSystemSection>() {
      public NameSystemSection parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new NameSystemSection(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<NameSystemSection> getParserForType() {
      return PARSER;
    }

    private int bitField0_;
    // optional uint32 namespaceId = 1;
    public static final int NAMESPACEID_FIELD_NUMBER = 1;
    private int namespaceId_;
    /**
     * <code>optional uint32 namespaceId = 1;</code>
     */
    public boolean hasNamespaceId() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>optional uint32 namespaceId = 1;</code>
     */
    public int getNamespaceId() {
      return namespaceId_;
    }

    // optional uint64 genstampV1 = 2;
    public static final int GENSTAMPV1_FIELD_NUMBER = 2;
    private long genstampV1_;
    /**
     * <code>optional uint64 genstampV1 = 2;</code>
     */
    public boolean hasGenstampV1() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>optional uint64 genstampV1 = 2;</code>
     */
    public long getGenstampV1() {
      return genstampV1_;
    }

    // optional uint64 genstampV2 = 3;
    public static final int GENSTAMPV2_FIELD_NUMBER = 3;
    private long genstampV2_;
    /**
     * <code>optional uint64 genstampV2 = 3;</code>
     */
    public boolean hasGenstampV2() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    /**
     * <code>optional uint64 genstampV2 = 3;</code>
     */
    public long getGenstampV2() {
      return genstampV2_;
    }

    // optional uint64 genstampV1Limit = 4;
    public static final int GENSTAMPV1LIMIT_FIELD_NUMBER = 4;
    private long genstampV1Limit_;
    /**
     * <code>optional uint64 genstampV1Limit = 4;</code>
     */
    public boolean hasGenstampV1Limit() {
      return ((bitField0_ & 0x00000008) == 0x00000008);
    }
    /**
     * <code>optional uint64 genstampV1Limit = 4;</code>
     */
    public long getGenstampV1Limit() {
      return genstampV1Limit_;
    }

    // optional uint64 lastAllocatedBlockId = 5;
    public static final int LASTALLOCATEDBLOCKID_FIELD_NUMBER = 5;
    private long lastAllocatedBlockId_;
    /**
     * <code>optional uint64 lastAllocatedBlockId = 5;</code>
     */
    public boolean hasLastAllocatedBlockId() {
      return ((bitField0_ & 0x00000010) == 0x00000010);
    }
    /**
     * <code>optional uint64 lastAllocatedBlockId = 5;</code>
     */
    public long getLastAllocatedBlockId() {
      return lastAllocatedBlockId_;
    }

    // optional uint64 transactionId = 6;
    public static final int TRANSACTIONID_FIELD_NUMBER = 6;
    private long transactionId_;
    /**
     * <code>optional uint64 transactionId = 6;</code>
     */
    public boolean hasTransactionId() {
      return ((bitField0_ & 0x00000020) == 0x00000020);
    }
    /**
     * <code>optional uint64 transactionId = 6;</code>
     */
    public long getTransactionId() {
      return transactionId_;
    }

    // optional uint64 rollingUpgradeStartTime = 7;
    public static final int ROLLINGUPGRADESTARTTIME_FIELD_NUMBER = 7;
    private long rollingUpgradeStartTime_;
    /**
     * <code>optional uint64 rollingUpgradeStartTime = 7;</code>
     */
    public boolean hasRollingUpgradeStartTime() {
      return ((bitField0_ & 0x00000040) == 0x00000040);
    }
    /**
     * <code>optional uint64 rollingUpgradeStartTime = 7;</code>
     */
    public long getRollingUpgradeStartTime() {
      return rollingUpgradeStartTime_;
    }

    private void initFields() {
      namespaceId_ = 0;
      genstampV1_ = 0L;
      genstampV2_ = 0L;
      genstampV1Limit_ = 0L;
      lastAllocatedBlockId_ = 0L;
      transactionId_ = 0L;
      rollingUpgradeStartTime_ = 0L;
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      memoizedIsInitialized = 1;
      return true;
    }
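    // memoizedIsInitialized uses -1 for "not yet computed" and 1 for
    // "initialized". NameSystemSection has no required fields, so the answer is
    // always true; the memo is set once and every later call short-circuits.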

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeUInt32(1, namespaceId_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeUInt64(2, genstampV1_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeUInt64(3, genstampV2_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        output.writeUInt64(4, genstampV1Limit_);
      }
      if (((bitField0_ & 0x00000010) == 0x00000010)) {
        output.writeUInt64(5, lastAllocatedBlockId_);
      }
      if (((bitField0_ & 0x00000020) == 0x00000020)) {
        output.writeUInt64(6, transactionId_);
      }
      if (((bitField0_ & 0x00000040) == 0x00000040)) {
        output.writeUInt64(7, rollingUpgradeStartTime_);
      }
      getUnknownFields().writeTo(output);
    }

    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt32Size(1, namespaceId_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(2, genstampV1_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(3, genstampV2_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(4, genstampV1Limit_);
      }
      if (((bitField0_ & 0x00000010) == 0x00000010)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(5, lastAllocatedBlockId_);
      }
      if (((bitField0_ & 0x00000020) == 0x00000020)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(6, transactionId_);
      }
      if (((bitField0_ & 0x00000040) == 0x00000040)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(7, rollingUpgradeStartTime_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
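    // Each computeUInt64Size() call returns the varint width of the field's tag
    // plus its value, at one byte per 7 significant bits. For example, a
    // transactionId_ of 300 costs 1 byte of tag (the value 48 fits in one byte)
    // plus 2 bytes of value.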

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.NameSystemSection parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.NameSystemSection parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.NameSystemSection parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.NameSystemSection parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.NameSystemSection parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.NameSystemSection parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.NameSystemSection parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.NameSystemSection parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.NameSystemSection parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.NameSystemSection parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
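    // This section is stored in the image under the name "NS_INFO" (see the
    // class Javadoc). A hedged sketch of loading it, assuming `in` is an
    // InputStream already positioned at the section's offset, limited to its
    // length, and already wrapped in a decompressor when FileSummary.getCodec()
    // is set:
    //
    //   NameSystemSection ns = NameSystemSection.parseFrom(in);
    //   long txid = ns.getTransactionId();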

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hdfs.server.namenode.FsImageProto.NameSystemSection prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.hdfs.fsimage.NameSystemSection}
     *
     * <pre>
     * Name: NS_INFO
     * </pre>
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hdfs.server.namenode.FsImageProto.NameSystemSectionOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_NameSystemSection_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_NameSystemSection_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hdfs.server.namenode.FsImageProto.NameSystemSection.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.NameSystemSection.Builder.class);
      }

      // Construct using org.apache.hadoop.hdfs.server.namenode.FsImageProto.NameSystemSection.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        namespaceId_ = 0;
        bitField0_ = (bitField0_ & ~0x00000001);
        genstampV1_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000002);
        genstampV2_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000004);
        genstampV1Limit_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000008);
        lastAllocatedBlockId_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000010);
        transactionId_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000020);
        rollingUpgradeStartTime_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000040);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_NameSystemSection_descriptor;
      }

      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.NameSystemSection getDefaultInstanceForType() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.NameSystemSection.getDefaultInstance();
      }

      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.NameSystemSection build() {
        org.apache.hadoop.hdfs.server.namenode.FsImageProto.NameSystemSection result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.NameSystemSection buildPartial() {
        org.apache.hadoop.hdfs.server.namenode.FsImageProto.NameSystemSection result = new org.apache.hadoop.hdfs.server.namenode.FsImageProto.NameSystemSection(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.namespaceId_ = namespaceId_;
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.genstampV1_ = genstampV1_;
        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
          to_bitField0_ |= 0x00000004;
        }
        result.genstampV2_ = genstampV2_;
        if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
          to_bitField0_ |= 0x00000008;
        }
        result.genstampV1Limit_ = genstampV1Limit_;
        if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
          to_bitField0_ |= 0x00000010;
        }
        result.lastAllocatedBlockId_ = lastAllocatedBlockId_;
        if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
          to_bitField0_ |= 0x00000020;
        }
        result.transactionId_ = transactionId_;
        if (((from_bitField0_ & 0x00000040) == 0x00000040)) {
          to_bitField0_ |= 0x00000040;
        }
        result.rollingUpgradeStartTime_ = rollingUpgradeStartTime_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
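      // buildPartial() copies every value unconditionally but rebuilds the
      // presence mask bit by bit, so the immutable message only reports
      // hasXxx() == true for fields explicitly set on the builder. Illustrative:
      //
      //   NameSystemSection m =
      //       NameSystemSection.newBuilder().setNamespaceId(42).build();
      //   m.hasNamespaceId();  // true  (bit 0x00000001 carried over)
      //   m.hasGenstampV1();   // false (value copied as 0L, but bit not set)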

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hdfs.server.namenode.FsImageProto.NameSystemSection) {
          return mergeFrom((org.apache.hadoop.hdfs.server.namenode.FsImageProto.NameSystemSection)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.hdfs.server.namenode.FsImageProto.NameSystemSection other) {
        if (other == org.apache.hadoop.hdfs.server.namenode.FsImageProto.NameSystemSection.getDefaultInstance()) return this;
        if (other.hasNamespaceId()) {
          setNamespaceId(other.getNamespaceId());
        }
        if (other.hasGenstampV1()) {
          setGenstampV1(other.getGenstampV1());
        }
        if (other.hasGenstampV2()) {
          setGenstampV2(other.getGenstampV2());
        }
        if (other.hasGenstampV1Limit()) {
          setGenstampV1Limit(other.getGenstampV1Limit());
        }
        if (other.hasLastAllocatedBlockId()) {
          setLastAllocatedBlockId(other.getLastAllocatedBlockId());
        }
        if (other.hasTransactionId()) {
          setTransactionId(other.getTransactionId());
        }
        if (other.hasRollingUpgradeStartTime()) {
          setRollingUpgradeStartTime(other.getRollingUpgradeStartTime());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
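      // Merge semantics for optional scalars: a field is copied only when
      // hasXxx() is true on `other`, so set fields in `other` overwrite this
      // builder and unset fields leave it untouched. For example, merging a
      // message with only namespaceId set into a builder that also carries
      // genstampV1 keeps genstampV1 and replaces namespaceId.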

      public final boolean isInitialized() {
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hdfs.server.namenode.FsImageProto.NameSystemSection parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hdfs.server.namenode.FsImageProto.NameSystemSection) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;

      // optional uint32 namespaceId = 1;
      private int namespaceId_;
      /**
       * <code>optional uint32 namespaceId = 1;</code>
       */
      public boolean hasNamespaceId() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>optional uint32 namespaceId = 1;</code>
       */
      public int getNamespaceId() {
        return namespaceId_;
      }
      /**
       * <code>optional uint32 namespaceId = 1;</code>
       */
      public Builder setNamespaceId(int value) {
        bitField0_ |= 0x00000001;
        namespaceId_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional uint32 namespaceId = 1;</code>
       */
      public Builder clearNamespaceId() {
        bitField0_ = (bitField0_ & ~0x00000001);
        namespaceId_ = 0;
        onChanged();
        return this;
      }

      // optional uint64 genstampV1 = 2;
      private long genstampV1_;
      /**
       * <code>optional uint64 genstampV1 = 2;</code>
       */
      public boolean hasGenstampV1() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>optional uint64 genstampV1 = 2;</code>
       */
      public long getGenstampV1() {
        return genstampV1_;
      }
      /**
       * <code>optional uint64 genstampV1 = 2;</code>
       */
      public Builder setGenstampV1(long value) {
        bitField0_ |= 0x00000002;
        genstampV1_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional uint64 genstampV1 = 2;</code>
       */
      public Builder clearGenstampV1() {
        bitField0_ = (bitField0_ & ~0x00000002);
        genstampV1_ = 0L;
        onChanged();
        return this;
      }

      // optional uint64 genstampV2 = 3;
      private long genstampV2_;
      /**
       * <code>optional uint64 genstampV2 = 3;</code>
       */
      public boolean hasGenstampV2() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      /**
       * <code>optional uint64 genstampV2 = 3;</code>
       */
      public long getGenstampV2() {
        return genstampV2_;
      }
      /**
       * <code>optional uint64 genstampV2 = 3;</code>
       */
      public Builder setGenstampV2(long value) {
        bitField0_ |= 0x00000004;
        genstampV2_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional uint64 genstampV2 = 3;</code>
       */
      public Builder clearGenstampV2() {
        bitField0_ = (bitField0_ & ~0x00000004);
        genstampV2_ = 0L;
        onChanged();
        return this;
      }

      // optional uint64 genstampV1Limit = 4;
      private long genstampV1Limit_;
      /**
       * <code>optional uint64 genstampV1Limit = 4;</code>
       */
      public boolean hasGenstampV1Limit() {
        return ((bitField0_ & 0x00000008) == 0x00000008);
      }
      /**
       * <code>optional uint64 genstampV1Limit = 4;</code>
       */
      public long getGenstampV1Limit() {
        return genstampV1Limit_;
      }
      /**
       * <code>optional uint64 genstampV1Limit = 4;</code>
       */
      public Builder setGenstampV1Limit(long value) {
        bitField0_ |= 0x00000008;
        genstampV1Limit_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional uint64 genstampV1Limit = 4;</code>
       */
      public Builder clearGenstampV1Limit() {
        bitField0_ = (bitField0_ & ~0x00000008);
        genstampV1Limit_ = 0L;
        onChanged();
        return this;
      }

      // optional uint64 lastAllocatedBlockId = 5;
      private long lastAllocatedBlockId_;
      /**
       * <code>optional uint64 lastAllocatedBlockId = 5;</code>
       */
      public boolean hasLastAllocatedBlockId() {
        return ((bitField0_ & 0x00000010) == 0x00000010);
      }
      /**
       * <code>optional uint64 lastAllocatedBlockId = 5;</code>
       */
      public long getLastAllocatedBlockId() {
        return lastAllocatedBlockId_;
      }
      /**
       * <code>optional uint64 lastAllocatedBlockId = 5;</code>
       */
      public Builder setLastAllocatedBlockId(long value) {
        bitField0_ |= 0x00000010;
        lastAllocatedBlockId_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional uint64 lastAllocatedBlockId = 5;</code>
       */
      public Builder clearLastAllocatedBlockId() {
        bitField0_ = (bitField0_ & ~0x00000010);
        lastAllocatedBlockId_ = 0L;
        onChanged();
        return this;
      }

      // optional uint64 transactionId = 6;
      private long transactionId_;
      /**
       * <code>optional uint64 transactionId = 6;</code>
       */
      public boolean hasTransactionId() {
        return ((bitField0_ & 0x00000020) == 0x00000020);
      }
      /**
       * <code>optional uint64 transactionId = 6;</code>
       */
      public long getTransactionId() {
        return transactionId_;
      }
      /**
       * <code>optional uint64 transactionId = 6;</code>
       */
      public Builder setTransactionId(long value) {
        bitField0_ |= 0x00000020;
        transactionId_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional uint64 transactionId = 6;</code>
       */
      public Builder clearTransactionId() {
        bitField0_ = (bitField0_ & ~0x00000020);
        transactionId_ = 0L;
        onChanged();
        return this;
      }

      // optional uint64 rollingUpgradeStartTime = 7;
      private long rollingUpgradeStartTime_;
      /**
       * <code>optional uint64 rollingUpgradeStartTime = 7;</code>
       */
      public boolean hasRollingUpgradeStartTime() {
        return ((bitField0_ & 0x00000040) == 0x00000040);
      }
      /**
       * <code>optional uint64 rollingUpgradeStartTime = 7;</code>
       */
      public long getRollingUpgradeStartTime() {
        return rollingUpgradeStartTime_;
      }
      /**
       * <code>optional uint64 rollingUpgradeStartTime = 7;</code>
       */
      public Builder setRollingUpgradeStartTime(long value) {
        bitField0_ |= 0x00000040;
        rollingUpgradeStartTime_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional uint64 rollingUpgradeStartTime = 7;</code>
       */
      public Builder clearRollingUpgradeStartTime() {
        bitField0_ = (bitField0_ & ~0x00000040);
        rollingUpgradeStartTime_ = 0L;
        onChanged();
        return this;
      }

      // @@protoc_insertion_point(builder_scope:hadoop.hdfs.fsimage.NameSystemSection)
    }

    static {
      defaultInstance = new NameSystemSection(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:hadoop.hdfs.fsimage.NameSystemSection)
  }

  public interface INodeSectionOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // optional uint64 lastInodeId = 1;
    /**
     * <code>optional uint64 lastInodeId = 1;</code>
     */
    boolean hasLastInodeId();
    /**
     * <code>optional uint64 lastInodeId = 1;</code>
     */
    long getLastInodeId();

    // optional uint64 numInodes = 2;
    /**
     * <code>optional uint64 numInodes = 2;</code>
     *
     * <pre>
     * Number of INode records that follow.
     * </pre>
     */
    boolean hasNumInodes();
    /**
     * <code>optional uint64 numInodes = 2;</code>
     *
     * <pre>
     * Number of INode records that follow.
     * </pre>
     */
    long getNumInodes();
  }
  /**
   * Protobuf type {@code hadoop.hdfs.fsimage.INodeSection}
   *
   * <pre>
   * Permission is serialized as a 64-bit long: [0:24):[24:48):[48:64) (in Big Endian).
   * The first and the second parts are the string ids of the user and
   * group names, and the last 16 bits are the permission bits.
   *
   * Name: INODE
   * </pre>
   */
  public static final class INodeSection extends
      com.google.protobuf.GeneratedMessage
      implements INodeSectionOrBuilder {
    // Use INodeSection.newBuilder() to construct.
    private INodeSection(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    private INodeSection(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final INodeSection defaultInstance;
    public static INodeSection getDefaultInstance() {
      return defaultInstance;
    }

    public INodeSection getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    private INodeSection(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 8: {
              bitField0_ |= 0x00000001;
              lastInodeId_ = input.readUInt64();
              break;
            }
            case 16: {
              bitField0_ |= 0x00000002;
              numInodes_ = input.readUInt64();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.Builder.class);
    }

    public static com.google.protobuf.Parser<INodeSection> PARSER =
        new com.google.protobuf.AbstractParser<INodeSection>() {
      public INodeSection parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new INodeSection(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<INodeSection> getParserForType() {
      return PARSER;
    }

    public interface FileUnderConstructionFeatureOrBuilder
        extends com.google.protobuf.MessageOrBuilder {

      // optional string clientName = 1;
      /**
       * <code>optional string clientName = 1;</code>
       */
      boolean hasClientName();
      /**
       * <code>optional string clientName = 1;</code>
       */
      java.lang.String getClientName();
      /**
       * <code>optional string clientName = 1;</code>
       */
      com.google.protobuf.ByteString
          getClientNameBytes();

      // optional string clientMachine = 2;
      /**
       * <code>optional string clientMachine = 2;</code>
       */
      boolean hasClientMachine();
      /**
       * <code>optional string clientMachine = 2;</code>
       */
      java.lang.String getClientMachine();
      /**
       * <code>optional string clientMachine = 2;</code>
       */
      com.google.protobuf.ByteString
          getClientMachineBytes();
    }
    /**
     * Protobuf type {@code hadoop.hdfs.fsimage.INodeSection.FileUnderConstructionFeature}
     *
     * <pre>
     * under-construction feature for INodeFile
     * </pre>
     */
    public static final class FileUnderConstructionFeature extends
        com.google.protobuf.GeneratedMessage
        implements FileUnderConstructionFeatureOrBuilder {
      // Use FileUnderConstructionFeature.newBuilder() to construct.
      private FileUnderConstructionFeature(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
        super(builder);
        this.unknownFields = builder.getUnknownFields();
      }
      private FileUnderConstructionFeature(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

      private static final FileUnderConstructionFeature defaultInstance;
      public static FileUnderConstructionFeature getDefaultInstance() {
        return defaultInstance;
      }

      public FileUnderConstructionFeature getDefaultInstanceForType() {
        return defaultInstance;
      }

      private final com.google.protobuf.UnknownFieldSet unknownFields;
      @java.lang.Override
      public final com.google.protobuf.UnknownFieldSet
          getUnknownFields() {
        return this.unknownFields;
      }
      private FileUnderConstructionFeature(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        initFields();
        int mutable_bitField0_ = 0;
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
            com.google.protobuf.UnknownFieldSet.newBuilder();
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              default: {
                if (!parseUnknownField(input, unknownFields,
                                       extensionRegistry, tag)) {
                  done = true;
                }
                break;
              }
              case 10: {
                bitField0_ |= 0x00000001;
                clientName_ = input.readBytes();
                break;
              }
              case 18: {
                bitField0_ |= 0x00000002;
                clientMachine_ = input.readBytes();
                break;
              }
            }
          }
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(this);
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(
              e.getMessage()).setUnfinishedMessage(this);
        } finally {
          this.unknownFields = unknownFields.build();
          makeExtensionsImmutable();
        }
      }
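      // Both fields here are strings, i.e. length-delimited on the wire
      // (wireType 2), hence case 10 (1 << 3 | 2) for clientName and
      // case 18 (2 << 3 | 2) for clientMachine in the switch above.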
2878      public static final com.google.protobuf.Descriptors.Descriptor
2879          getDescriptor() {
2880        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_FileUnderConstructionFeature_descriptor;
2881      }
2882
2883      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
2884          internalGetFieldAccessorTable() {
2885        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_FileUnderConstructionFeature_fieldAccessorTable
2886            .ensureFieldAccessorsInitialized(
2887                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature.Builder.class);
2888      }
2889
2890      public static com.google.protobuf.Parser<FileUnderConstructionFeature> PARSER =
2891          new com.google.protobuf.AbstractParser<FileUnderConstructionFeature>() {
2892        public FileUnderConstructionFeature parsePartialFrom(
2893            com.google.protobuf.CodedInputStream input,
2894            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
2895            throws com.google.protobuf.InvalidProtocolBufferException {
2896          return new FileUnderConstructionFeature(input, extensionRegistry);
2897        }
2898      };
2899
2900      @java.lang.Override
2901      public com.google.protobuf.Parser<FileUnderConstructionFeature> getParserForType() {
2902        return PARSER;
2903      }
2904
2905      private int bitField0_;
2906      // optional string clientName = 1;
2907      public static final int CLIENTNAME_FIELD_NUMBER = 1;
2908      private java.lang.Object clientName_;
2909      /**
2910       * <code>optional string clientName = 1;</code>
2911       */
2912      public boolean hasClientName() {
2913        return ((bitField0_ & 0x00000001) == 0x00000001);
2914      }
2915      /**
2916       * <code>optional string clientName = 1;</code>
2917       */
2918      public java.lang.String getClientName() {
2919        java.lang.Object ref = clientName_;
2920        if (ref instanceof java.lang.String) {
2921          return (java.lang.String) ref;
2922        } else {
2923          com.google.protobuf.ByteString bs = 
2924              (com.google.protobuf.ByteString) ref;
2925          java.lang.String s = bs.toStringUtf8();
2926          if (bs.isValidUtf8()) {
2927            clientName_ = s;
2928          }
2929          return s;
2930        }
2931      }
2932      /**
2933       * <code>optional string clientName = 1;</code>
2934       */
2935      public com.google.protobuf.ByteString
2936          getClientNameBytes() {
2937        java.lang.Object ref = clientName_;
2938        if (ref instanceof java.lang.String) {
2939          com.google.protobuf.ByteString b = 
2940              com.google.protobuf.ByteString.copyFromUtf8(
2941                  (java.lang.String) ref);
2942          clientName_ = b;
2943          return b;
2944        } else {
2945          return (com.google.protobuf.ByteString) ref;
2946        }
2947      }
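
      // Note on the accessor pair above: clientName_ holds either the raw
      // ByteString read off the wire or a decoded String. getClientName()
      // caches the decoded String only when the bytes are valid UTF-8, and
      // getClientNameBytes() caches the re-encoded ByteString, so repeated
      // calls in either direction avoid converting twice.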
2948
2949      // optional string clientMachine = 2;
2950      public static final int CLIENTMACHINE_FIELD_NUMBER = 2;
2951      private java.lang.Object clientMachine_;
2952      /**
2953       * <code>optional string clientMachine = 2;</code>
2954       */
2955      public boolean hasClientMachine() {
2956        return ((bitField0_ & 0x00000002) == 0x00000002);
2957      }
2958      /**
2959       * <code>optional string clientMachine = 2;</code>
2960       */
2961      public java.lang.String getClientMachine() {
2962        java.lang.Object ref = clientMachine_;
2963        if (ref instanceof java.lang.String) {
2964          return (java.lang.String) ref;
2965        } else {
2966          com.google.protobuf.ByteString bs = 
2967              (com.google.protobuf.ByteString) ref;
2968          java.lang.String s = bs.toStringUtf8();
2969          if (bs.isValidUtf8()) {
2970            clientMachine_ = s;
2971          }
2972          return s;
2973        }
2974      }
2975      /**
2976       * <code>optional string clientMachine = 2;</code>
2977       */
2978      public com.google.protobuf.ByteString
2979          getClientMachineBytes() {
2980        java.lang.Object ref = clientMachine_;
2981        if (ref instanceof java.lang.String) {
2982          com.google.protobuf.ByteString b = 
2983              com.google.protobuf.ByteString.copyFromUtf8(
2984                  (java.lang.String) ref);
2985          clientMachine_ = b;
2986          return b;
2987        } else {
2988          return (com.google.protobuf.ByteString) ref;
2989        }
2990      }
2991
2992      private void initFields() {
2993        clientName_ = "";
2994        clientMachine_ = "";
2995      }
2996      private byte memoizedIsInitialized = -1;
2997      public final boolean isInitialized() {
2998        byte isInitialized = memoizedIsInitialized;
2999        if (isInitialized != -1) return isInitialized == 1;
3000
3001        memoizedIsInitialized = 1;
3002        return true;
3003      }
3004
3005      public void writeTo(com.google.protobuf.CodedOutputStream output)
3006                          throws java.io.IOException {
3007        getSerializedSize();
3008        if (((bitField0_ & 0x00000001) == 0x00000001)) {
3009          output.writeBytes(1, getClientNameBytes());
3010        }
3011        if (((bitField0_ & 0x00000002) == 0x00000002)) {
3012          output.writeBytes(2, getClientMachineBytes());
3013        }
3014        getUnknownFields().writeTo(output);
3015      }
3016
3017      private int memoizedSerializedSize = -1;
3018      public int getSerializedSize() {
3019        int size = memoizedSerializedSize;
3020        if (size != -1) return size;
3021
3022        size = 0;
3023        if (((bitField0_ & 0x00000001) == 0x00000001)) {
3024          size += com.google.protobuf.CodedOutputStream
3025            .computeBytesSize(1, getClientNameBytes());
3026        }
3027        if (((bitField0_ & 0x00000002) == 0x00000002)) {
3028          size += com.google.protobuf.CodedOutputStream
3029            .computeBytesSize(2, getClientMachineBytes());
3030        }
3031        size += getUnknownFields().getSerializedSize();
3032        memoizedSerializedSize = size;
3033        return size;
3034      }
3035
3036      private static final long serialVersionUID = 0L;
3037      @java.lang.Override
3038      protected java.lang.Object writeReplace()
3039          throws java.io.ObjectStreamException {
3040        return super.writeReplace();
3041      }
3042
3043      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature parseFrom(
3044          com.google.protobuf.ByteString data)
3045          throws com.google.protobuf.InvalidProtocolBufferException {
3046        return PARSER.parseFrom(data);
3047      }
3048      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature parseFrom(
3049          com.google.protobuf.ByteString data,
3050          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
3051          throws com.google.protobuf.InvalidProtocolBufferException {
3052        return PARSER.parseFrom(data, extensionRegistry);
3053      }
3054      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature parseFrom(byte[] data)
3055          throws com.google.protobuf.InvalidProtocolBufferException {
3056        return PARSER.parseFrom(data);
3057      }
3058      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature parseFrom(
3059          byte[] data,
3060          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
3061          throws com.google.protobuf.InvalidProtocolBufferException {
3062        return PARSER.parseFrom(data, extensionRegistry);
3063      }
3064      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature parseFrom(java.io.InputStream input)
3065          throws java.io.IOException {
3066        return PARSER.parseFrom(input);
3067      }
3068      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature parseFrom(
3069          java.io.InputStream input,
3070          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
3071          throws java.io.IOException {
3072        return PARSER.parseFrom(input, extensionRegistry);
3073      }
3074      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature parseDelimitedFrom(java.io.InputStream input)
3075          throws java.io.IOException {
3076        return PARSER.parseDelimitedFrom(input);
3077      }
3078      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature parseDelimitedFrom(
3079          java.io.InputStream input,
3080          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
3081          throws java.io.IOException {
3082        return PARSER.parseDelimitedFrom(input, extensionRegistry);
3083      }
3084      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature parseFrom(
3085          com.google.protobuf.CodedInputStream input)
3086          throws java.io.IOException {
3087        return PARSER.parseFrom(input);
3088      }
3089      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature parseFrom(
3090          com.google.protobuf.CodedInputStream input,
3091          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
3092          throws java.io.IOException {
3093        return PARSER.parseFrom(input, extensionRegistry);
3094      }
3095
3096      public static Builder newBuilder() { return Builder.create(); }
3097      public Builder newBuilderForType() { return newBuilder(); }
3098      public static Builder newBuilder(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature prototype) {
3099        return newBuilder().mergeFrom(prototype);
3100      }
3101      public Builder toBuilder() { return newBuilder(this); }
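
      // A typical round trip through the factories above (values are
      // illustrative only):
      //
      //   FileUnderConstructionFeature f = newBuilder()
      //       .setClientName("DFSClient_example")
      //       .setClientMachine("127.0.0.1")
      //       .build();
      //   FileUnderConstructionFeature g = parseFrom(f.toByteString());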
3102
3103      @java.lang.Override
3104      protected Builder newBuilderForType(
3105          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
3106        Builder builder = new Builder(parent);
3107        return builder;
3108      }
3109      /**
3110       * Protobuf type {@code hadoop.hdfs.fsimage.INodeSection.FileUnderConstructionFeature}
3111       *
3112       * <pre>
3113       **
3114       * under-construction feature for INodeFile
3115       * </pre>
3116       */
3117      public static final class Builder extends
3118          com.google.protobuf.GeneratedMessage.Builder<Builder>
3119         implements org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeatureOrBuilder {
3120        public static final com.google.protobuf.Descriptors.Descriptor
3121            getDescriptor() {
3122          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_FileUnderConstructionFeature_descriptor;
3123        }
3124
3125        protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
3126            internalGetFieldAccessorTable() {
3127          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_FileUnderConstructionFeature_fieldAccessorTable
3128              .ensureFieldAccessorsInitialized(
3129                  org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature.Builder.class);
3130        }
3131
3132        // Construct using org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature.newBuilder()
3133        private Builder() {
3134          maybeForceBuilderInitialization();
3135        }
3136
3137        private Builder(
3138            com.google.protobuf.GeneratedMessage.BuilderParent parent) {
3139          super(parent);
3140          maybeForceBuilderInitialization();
3141        }
3142        private void maybeForceBuilderInitialization() {
3143          if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
3144          }
3145        }
3146        private static Builder create() {
3147          return new Builder();
3148        }
3149
3150        public Builder clear() {
3151          super.clear();
3152          clientName_ = "";
3153          bitField0_ = (bitField0_ & ~0x00000001);
3154          clientMachine_ = "";
3155          bitField0_ = (bitField0_ & ~0x00000002);
3156          return this;
3157        }
3158
3159        public Builder clone() {
3160          return create().mergeFrom(buildPartial());
3161        }
3162
3163        public com.google.protobuf.Descriptors.Descriptor
3164            getDescriptorForType() {
3165          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_FileUnderConstructionFeature_descriptor;
3166        }
3167
3168        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature getDefaultInstanceForType() {
3169          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature.getDefaultInstance();
3170        }
3171
3172        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature build() {
3173          org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature result = buildPartial();
3174          if (!result.isInitialized()) {
3175            throw newUninitializedMessageException(result);
3176          }
3177          return result;
3178        }
3179
3180        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature buildPartial() {
3181          org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature result = new org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature(this);
3182          int from_bitField0_ = bitField0_;
3183          int to_bitField0_ = 0;
3184          if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
3185            to_bitField0_ |= 0x00000001;
3186          }
3187          result.clientName_ = clientName_;
3188          if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
3189            to_bitField0_ |= 0x00000002;
3190          }
3191          result.clientMachine_ = clientMachine_;
3192          result.bitField0_ = to_bitField0_;
3193          onBuilt();
3194          return result;
3195        }
3196
3197        public Builder mergeFrom(com.google.protobuf.Message other) {
3198          if (other instanceof org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature) {
3199            return mergeFrom((org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature)other);
3200          } else {
3201            super.mergeFrom(other);
3202            return this;
3203          }
3204        }
3205
3206        public Builder mergeFrom(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature other) {
3207          if (other == org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature.getDefaultInstance()) return this;
3208          if (other.hasClientName()) {
3209            bitField0_ |= 0x00000001;
3210            clientName_ = other.clientName_;
3211            onChanged();
3212          }
3213          if (other.hasClientMachine()) {
3214            bitField0_ |= 0x00000002;
3215            clientMachine_ = other.clientMachine_;
3216            onChanged();
3217          }
3218          this.mergeUnknownFields(other.getUnknownFields());
3219          return this;
3220        }
3221
3222        public final boolean isInitialized() {
3223          return true;
3224        }
3225
3226        public Builder mergeFrom(
3227            com.google.protobuf.CodedInputStream input,
3228            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
3229            throws java.io.IOException {
3230          org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature parsedMessage = null;
3231          try {
3232            parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
3233          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
3234            parsedMessage = (org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature) e.getUnfinishedMessage();
3235            throw e;
3236          } finally {
3237            if (parsedMessage != null) {
3238              mergeFrom(parsedMessage);
3239            }
3240          }
3241          return this;
3242        }
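
        // Note: the mergeFrom overload above parses a complete message first
        // and only merges it into this builder afterwards; on an
        // InvalidProtocolBufferException the partially parsed message is
        // still merged in the finally block, so fields decoded before the
        // failure are preserved.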
3243        private int bitField0_;
3244
3245        // optional string clientName = 1;
3246        private java.lang.Object clientName_ = "";
3247        /**
3248         * <code>optional string clientName = 1;</code>
3249         */
3250        public boolean hasClientName() {
3251          return ((bitField0_ & 0x00000001) == 0x00000001);
3252        }
3253        /**
3254         * <code>optional string clientName = 1;</code>
3255         */
3256        public java.lang.String getClientName() {
3257          java.lang.Object ref = clientName_;
3258          if (!(ref instanceof java.lang.String)) {
3259            java.lang.String s = ((com.google.protobuf.ByteString) ref)
3260                .toStringUtf8();
3261            clientName_ = s;
3262            return s;
3263          } else {
3264            return (java.lang.String) ref;
3265          }
3266        }
3267        /**
3268         * <code>optional string clientName = 1;</code>
3269         */
3270        public com.google.protobuf.ByteString
3271            getClientNameBytes() {
3272          java.lang.Object ref = clientName_;
3273          if (ref instanceof String) {
3274            com.google.protobuf.ByteString b = 
3275                com.google.protobuf.ByteString.copyFromUtf8(
3276                    (java.lang.String) ref);
3277            clientName_ = b;
3278            return b;
3279          } else {
3280            return (com.google.protobuf.ByteString) ref;
3281          }
3282        }
3283        /**
3284         * <code>optional string clientName = 1;</code>
3285         */
3286        public Builder setClientName(
3287            java.lang.String value) {
3288          if (value == null) {
3289            throw new NullPointerException();
3290          }
3291          bitField0_ |= 0x00000001;
3292          clientName_ = value;
3293          onChanged();
3294          return this;
3295        }
3296        /**
3297         * <code>optional string clientName = 1;</code>
3298         */
3299        public Builder clearClientName() {
3300          bitField0_ = (bitField0_ & ~0x00000001);
3301          clientName_ = getDefaultInstance().getClientName();
3302          onChanged();
3303          return this;
3304        }
3305        /**
3306         * <code>optional string clientName = 1;</code>
3307         */
3308        public Builder setClientNameBytes(
3309            com.google.protobuf.ByteString value) {
3310          if (value == null) {
3311            throw new NullPointerException();
3312          }
3313          bitField0_ |= 0x00000001;
3314          clientName_ = value;
3315          onChanged();
3316          return this;
3317        }
3318
3319        // optional string clientMachine = 2;
3320        private java.lang.Object clientMachine_ = "";
3321        /**
3322         * <code>optional string clientMachine = 2;</code>
3323         */
3324        public boolean hasClientMachine() {
3325          return ((bitField0_ & 0x00000002) == 0x00000002);
3326        }
3327        /**
3328         * <code>optional string clientMachine = 2;</code>
3329         */
3330        public java.lang.String getClientMachine() {
3331          java.lang.Object ref = clientMachine_;
3332          if (!(ref instanceof java.lang.String)) {
3333            java.lang.String s = ((com.google.protobuf.ByteString) ref)
3334                .toStringUtf8();
3335            clientMachine_ = s;
3336            return s;
3337          } else {
3338            return (java.lang.String) ref;
3339          }
3340        }
3341        /**
3342         * <code>optional string clientMachine = 2;</code>
3343         */
3344        public com.google.protobuf.ByteString
3345            getClientMachineBytes() {
3346          java.lang.Object ref = clientMachine_;
3347          if (ref instanceof String) {
3348            com.google.protobuf.ByteString b = 
3349                com.google.protobuf.ByteString.copyFromUtf8(
3350                    (java.lang.String) ref);
3351            clientMachine_ = b;
3352            return b;
3353          } else {
3354            return (com.google.protobuf.ByteString) ref;
3355          }
3356        }
3357        /**
3358         * <code>optional string clientMachine = 2;</code>
3359         */
3360        public Builder setClientMachine(
3361            java.lang.String value) {
3362          if (value == null) {
3363            throw new NullPointerException();
3364          }
3365          bitField0_ |= 0x00000002;
3366          clientMachine_ = value;
3367          onChanged();
3368          return this;
3369        }
3370        /**
3371         * <code>optional string clientMachine = 2;</code>
3372         */
3373        public Builder clearClientMachine() {
3374          bitField0_ = (bitField0_ & ~0x00000002);
3375          clientMachine_ = getDefaultInstance().getClientMachine();
3376          onChanged();
3377          return this;
3378        }
3379        /**
3380         * <code>optional string clientMachine = 2;</code>
3381         */
3382        public Builder setClientMachineBytes(
3383            com.google.protobuf.ByteString value) {
3384          if (value == null) {
3385            throw new NullPointerException();
3386          }
3387          bitField0_ |= 0x00000002;
3388          clientMachine_ = value;
3389          onChanged();
3390          return this;
3391        }
3392
3393        // @@protoc_insertion_point(builder_scope:hadoop.hdfs.fsimage.INodeSection.FileUnderConstructionFeature)
3394      }
3395
3396      static {
3397        defaultInstance = new FileUnderConstructionFeature(true);
3398        defaultInstance.initFields();
3399      }
3400
3401      // @@protoc_insertion_point(class_scope:hadoop.hdfs.fsimage.INodeSection.FileUnderConstructionFeature)
3402    }
3403
3404    public interface AclFeatureProtoOrBuilder
3405        extends com.google.protobuf.MessageOrBuilder {
3406
3407      // repeated fixed32 entries = 2 [packed = true];
3408      /**
3409       * <code>repeated fixed32 entries = 2 [packed = true];</code>
3410       *
3411       * <pre>
3412       **
3413       * An ACL entry is represented by a 32-bit integer in Big Endian
3414       * format. The bits can be divided into five segments:
3415       * [0:2) || [2:26) || [26:27) || [27:29) || [29:32)
3416       *
3417       * [0:2) -- reserved for future uses.
3418       * [2:26) -- the name of the entry, which is an ID that points to a
3419       * string in the StringTableSection.
3420       * [26:27) -- the scope of the entry (AclEntryScopeProto)
3421       * [27:29) -- the type of the entry (AclEntryTypeProto)
3422       * [29:32) -- the permission of the entry (FsActionProto)
3423       * </pre>
3424       */
3425      java.util.List<java.lang.Integer> getEntriesList();
3426      /**
3427       * <code>repeated fixed32 entries = 2 [packed = true];</code>
3428       *
3429       * <pre>
3430       **
3431       * An ACL entry is represented by a 32-bit integer in Big Endian
3432       * format. The bits can be divided into five segments:
3433       * [0:2) || [2:26) || [26:27) || [27:29) || [29:32)
3434       *
3435       * [0:2) -- reserved for future uses.
3436       * [2:26) -- the name of the entry, which is an ID that points to a
3437       * string in the StringTableSection.
3438       * [26:27) -- the scope of the entry (AclEntryScopeProto)
3439       * [27:29) -- the type of the entry (AclEntryTypeProto)
3440       * [29:32) -- the permission of the entry (FsActionProto)
3441       * </pre>
3442       */
3443      int getEntriesCount();
3444      /**
3445       * <code>repeated fixed32 entries = 2 [packed = true];</code>
3446       *
3447       * <pre>
3448       **
3449       * An ACL entry is represented by a 32-bit integer in Big Endian
3450       * format. The bits can be divided into five segments:
3451       * [0:2) || [2:26) || [26:27) || [27:29) || [29:32)
3452       *
3453       * [0:2) -- reserved for future uses.
3454       * [2:26) -- the name of the entry, which is an ID that points to a
3455       * string in the StringTableSection.
3456       * [26:27) -- the scope of the entry (AclEntryScopeProto)
3457       * [27:29) -- the type of the entry (AclEntryTypeProto)
3458       * [29:32) -- the permission of the entry (FsActionProto)
3459       * </pre>
3460       */
3461      int getEntries(int index);
3462    }
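
    // A minimal sketch of unpacking one ACL entry per the bit layout
    // documented above (the helper name and int[] return type are
    // illustrative, not part of the generated API; bits [0:2) are the
    // reserved segment):
    private static int[] decodeAclEntry(int entry) {
      int nameId     = (entry >>> 6) & ((1 << 24) - 1); // bits [2:26), StringTable ID
      int scope      = (entry >>> 5) & 1;               // bit  [26:27), AclEntryScopeProto
      int type       = (entry >>> 3) & 3;               // bits [27:29), AclEntryTypeProto
      int permission = entry & 7;                       // bits [29:32), FsActionProto
      return new int[] { nameId, scope, type, permission };
    }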
3463    /**
3464     * Protobuf type {@code hadoop.hdfs.fsimage.INodeSection.AclFeatureProto}
3465     */
3466    public static final class AclFeatureProto extends
3467        com.google.protobuf.GeneratedMessage
3468        implements AclFeatureProtoOrBuilder {
3469      // Use AclFeatureProto.newBuilder() to construct.
3470      private AclFeatureProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
3471        super(builder);
3472        this.unknownFields = builder.getUnknownFields();
3473      }
3474      private AclFeatureProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
3475
3476      private static final AclFeatureProto defaultInstance;
3477      public static AclFeatureProto getDefaultInstance() {
3478        return defaultInstance;
3479      }
3480
3481      public AclFeatureProto getDefaultInstanceForType() {
3482        return defaultInstance;
3483      }
3484
3485      private final com.google.protobuf.UnknownFieldSet unknownFields;
3486      @java.lang.Override
3487      public final com.google.protobuf.UnknownFieldSet
3488          getUnknownFields() {
3489        return this.unknownFields;
3490      }
3491      private AclFeatureProto(
3492          com.google.protobuf.CodedInputStream input,
3493          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
3494          throws com.google.protobuf.InvalidProtocolBufferException {
3495        initFields();
3496        int mutable_bitField0_ = 0;
3497        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
3498            com.google.protobuf.UnknownFieldSet.newBuilder();
3499        try {
3500          boolean done = false;
3501          while (!done) {
3502            int tag = input.readTag();
3503            switch (tag) {
3504              case 0:
3505                done = true;
3506                break;
3507              default: {
3508                if (!parseUnknownField(input, unknownFields,
3509                                       extensionRegistry, tag)) {
3510                  done = true;
3511                }
3512                break;
3513              }
3514              case 21: {
3515                if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
3516                  entries_ = new java.util.ArrayList<java.lang.Integer>();
3517                  mutable_bitField0_ |= 0x00000001;
3518                }
3519                entries_.add(input.readFixed32());
3520                break;
3521              }
3522              case 18: {
3523                int length = input.readRawVarint32();
3524                int limit = input.pushLimit(length);
3525                if (!((mutable_bitField0_ & 0x00000001) == 0x00000001) && input.getBytesUntilLimit() > 0) {
3526                  entries_ = new java.util.ArrayList<java.lang.Integer>();
3527                  mutable_bitField0_ |= 0x00000001;
3528                }
3529                while (input.getBytesUntilLimit() > 0) {
3530                  entries_.add(input.readFixed32());
3531                }
3532                input.popLimit(limit);
3533                break;
3534              }
3535            }
3536          }
3537        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
3538          throw e.setUnfinishedMessage(this);
3539        } catch (java.io.IOException e) {
3540          throw new com.google.protobuf.InvalidProtocolBufferException(
3541              e.getMessage()).setUnfinishedMessage(this);
3542        } finally {
3543          if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
3544            entries_ = java.util.Collections.unmodifiableList(entries_);
3545          }
3546          this.unknownFields = unknownFields.build();
3547          makeExtensionsImmutable();
3548        }
3549      }
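      // Two case labels above accept the same field: tag 18 is
      // (2 << 3) | 2, the packed length-delimited form declared in the
      // .proto, while tag 21 is (2 << 3) | 5, a bare fixed32 element.
      // Accepting both encodings keeps the parser compatible with writers
      // that did not pack the repeated field.
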
3550      public static final com.google.protobuf.Descriptors.Descriptor
3551          getDescriptor() {
3552        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_AclFeatureProto_descriptor;
3553      }
3554
3555      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
3556          internalGetFieldAccessorTable() {
3557        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_AclFeatureProto_fieldAccessorTable
3558            .ensureFieldAccessorsInitialized(
3559                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto.Builder.class);
3560      }
3561
3562      public static com.google.protobuf.Parser<AclFeatureProto> PARSER =
3563          new com.google.protobuf.AbstractParser<AclFeatureProto>() {
3564        public AclFeatureProto parsePartialFrom(
3565            com.google.protobuf.CodedInputStream input,
3566            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
3567            throws com.google.protobuf.InvalidProtocolBufferException {
3568          return new AclFeatureProto(input, extensionRegistry);
3569        }
3570      };
3571
3572      @java.lang.Override
3573      public com.google.protobuf.Parser<AclFeatureProto> getParserForType() {
3574        return PARSER;
3575      }
3576
3577      // repeated fixed32 entries = 2 [packed = true];
3578      public static final int ENTRIES_FIELD_NUMBER = 2;
3579      private java.util.List<java.lang.Integer> entries_;
3580      /**
3581       * <code>repeated fixed32 entries = 2 [packed = true];</code>
3582       *
3583       * <pre>
3584       **
3585       * An ACL entry is represented by a 32-bit integer in Big Endian
3586       * format. The bits can be divided into five segments:
3587       * [0:2) || [2:26) || [26:27) || [27:29) || [29:32)
3588       *
3589       * [0:2) -- reserved for future uses.
3590       * [2:26) -- the name of the entry, which is an ID that points to a
3591       * string in the StringTableSection.
3592       * [26:27) -- the scope of the entry (AclEntryScopeProto)
3593       * [27:29) -- the type of the entry (AclEntryTypeProto)
3594       * [29:32) -- the permission of the entry (FsActionProto)
3595       * </pre>
3596       */
3597      public java.util.List<java.lang.Integer>
3598          getEntriesList() {
3599        return entries_;
3600      }
3601      /**
3602       * <code>repeated fixed32 entries = 2 [packed = true];</code>
3603       *
3604       * <pre>
3605       **
3606       * An ACL entry is represented by a 32-bit integer in Big Endian
3607       * format. The bits can be divided into five segments:
3608       * [0:2) || [2:26) || [26:27) || [27:29) || [29:32)
3609       *
3610       * [0:2) -- reserved for future uses.
3611       * [2:26) -- the name of the entry, which is an ID that points to a
3612       * string in the StringTableSection.
3613       * [26:27) -- the scope of the entry (AclEntryScopeProto)
3614       * [27:29) -- the type of the entry (AclEntryTypeProto)
3615       * [29:32) -- the permission of the entry (FsActionProto)
3616       * </pre>
3617       */
3618      public int getEntriesCount() {
3619        return entries_.size();
3620      }
3621      /**
3622       * <code>repeated fixed32 entries = 2 [packed = true];</code>
3623       *
3624       * <pre>
3625       **
3626       * An ACL entry is represented by a 32-bit integer in Big Endian
3627       * format. The bits can be divided into five segments:
3628       * [0:2) || [2:26) || [26:27) || [27:29) || [29:32)
3629       *
3630       * [0:2) -- reserved for future uses.
3631       * [2:26) -- the name of the entry, which is an ID that points to a
3632       * string in the StringTableSection.
3633       * [26:27) -- the scope of the entry (AclEntryScopeProto)
3634       * [27:29) -- the type of the entry (AclEntryTypeProto)
3635       * [29:32) -- the permission of the entry (FsActionProto)
3636       * </pre>
3637       */
3638      public int getEntries(int index) {
3639        return entries_.get(index);
3640      }
3641      private int entriesMemoizedSerializedSize = -1;
3642
3643      private void initFields() {
3644        entries_ = java.util.Collections.emptyList();
3645      }
3646      private byte memoizedIsInitialized = -1;
3647      public final boolean isInitialized() {
3648        byte isInitialized = memoizedIsInitialized;
3649        if (isInitialized != -1) return isInitialized == 1;
3650
3651        memoizedIsInitialized = 1;
3652        return true;
3653      }
3654
3655      public void writeTo(com.google.protobuf.CodedOutputStream output)
3656                          throws java.io.IOException {
3657        getSerializedSize();
3658        if (getEntriesList().size() > 0) {
3659          output.writeRawVarint32(18);
3660          output.writeRawVarint32(entriesMemoizedSerializedSize);
3661        }
3662        for (int i = 0; i < entries_.size(); i++) {
3663          output.writeFixed32NoTag(entries_.get(i));
3664        }
3665        getUnknownFields().writeTo(output);
3666      }
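      // writeTo calls getSerializedSize() first because the packed 'entries'
      // field is emitted as tag 18 followed by a varint byte length, and
      // that length (entriesMemoizedSerializedSize) is computed and cached
      // as a side effect of getSerializedSize() below.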
3667
3668      private int memoizedSerializedSize = -1;
3669      public int getSerializedSize() {
3670        int size = memoizedSerializedSize;
3671        if (size != -1) return size;
3672
3673        size = 0;
3674        {
3675          int dataSize = 0;
3676          dataSize = 4 * getEntriesList().size();
3677          size += dataSize;
3678          if (!getEntriesList().isEmpty()) {
3679            size += 1;
3680            size += com.google.protobuf.CodedOutputStream
3681                .computeInt32SizeNoTag(dataSize);
3682          }
3683          entriesMemoizedSerializedSize = dataSize;
3684        }
3685        size += getUnknownFields().getSerializedSize();
3686        memoizedSerializedSize = size;
3687        return size;
3688      }
3689
3690      private static final long serialVersionUID = 0L;
3691      @java.lang.Override
3692      protected java.lang.Object writeReplace()
3693          throws java.io.ObjectStreamException {
3694        return super.writeReplace();
3695      }
3696
3697      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto parseFrom(
3698          com.google.protobuf.ByteString data)
3699          throws com.google.protobuf.InvalidProtocolBufferException {
3700        return PARSER.parseFrom(data);
3701      }
3702      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto parseFrom(
3703          com.google.protobuf.ByteString data,
3704          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
3705          throws com.google.protobuf.InvalidProtocolBufferException {
3706        return PARSER.parseFrom(data, extensionRegistry);
3707      }
3708      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto parseFrom(byte[] data)
3709          throws com.google.protobuf.InvalidProtocolBufferException {
3710        return PARSER.parseFrom(data);
3711      }
3712      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto parseFrom(
3713          byte[] data,
3714          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
3715          throws com.google.protobuf.InvalidProtocolBufferException {
3716        return PARSER.parseFrom(data, extensionRegistry);
3717      }
3718      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto parseFrom(java.io.InputStream input)
3719          throws java.io.IOException {
3720        return PARSER.parseFrom(input);
3721      }
3722      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto parseFrom(
3723          java.io.InputStream input,
3724          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
3725          throws java.io.IOException {
3726        return PARSER.parseFrom(input, extensionRegistry);
3727      }
3728      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto parseDelimitedFrom(java.io.InputStream input)
3729          throws java.io.IOException {
3730        return PARSER.parseDelimitedFrom(input);
3731      }
3732      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto parseDelimitedFrom(
3733          java.io.InputStream input,
3734          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
3735          throws java.io.IOException {
3736        return PARSER.parseDelimitedFrom(input, extensionRegistry);
3737      }
3738      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto parseFrom(
3739          com.google.protobuf.CodedInputStream input)
3740          throws java.io.IOException {
3741        return PARSER.parseFrom(input);
3742      }
3743      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto parseFrom(
3744          com.google.protobuf.CodedInputStream input,
3745          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
3746          throws java.io.IOException {
3747        return PARSER.parseFrom(input, extensionRegistry);
3748      }
3749
3750      public static Builder newBuilder() { return Builder.create(); }
3751      public Builder newBuilderForType() { return newBuilder(); }
3752      public static Builder newBuilder(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto prototype) {
3753        return newBuilder().mergeFrom(prototype);
3754      }
3755      public Builder toBuilder() { return newBuilder(this); }
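
      // A short usage sketch for the builder API above (0x42 is an
      // illustrative packed entry value, not a meaningful ACL):
      //
      //   AclFeatureProto acl = newBuilder().addEntries(0x42).build();
      //   int first = acl.getEntries(0);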
3756
3757      @java.lang.Override
3758      protected Builder newBuilderForType(
3759          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
3760        Builder builder = new Builder(parent);
3761        return builder;
3762      }
3763      /**
3764       * Protobuf type {@code hadoop.hdfs.fsimage.INodeSection.AclFeatureProto}
3765       */
3766      public static final class Builder extends
3767          com.google.protobuf.GeneratedMessage.Builder<Builder>
3768         implements org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProtoOrBuilder {
3769        public static final com.google.protobuf.Descriptors.Descriptor
3770            getDescriptor() {
3771          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_AclFeatureProto_descriptor;
3772        }
3773
3774        protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
3775            internalGetFieldAccessorTable() {
3776          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_AclFeatureProto_fieldAccessorTable
3777              .ensureFieldAccessorsInitialized(
3778                  org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto.Builder.class);
3779        }
3780
3781        // Construct using org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto.newBuilder()
3782        private Builder() {
3783          maybeForceBuilderInitialization();
3784        }
3785
3786        private Builder(
3787            com.google.protobuf.GeneratedMessage.BuilderParent parent) {
3788          super(parent);
3789          maybeForceBuilderInitialization();
3790        }
3791        private void maybeForceBuilderInitialization() {
3792          if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
3793          }
3794        }
3795        private static Builder create() {
3796          return new Builder();
3797        }
3798
3799        public Builder clear() {
3800          super.clear();
3801          entries_ = java.util.Collections.emptyList();
3802          bitField0_ = (bitField0_ & ~0x00000001);
3803          return this;
3804        }
3805
3806        public Builder clone() {
3807          return create().mergeFrom(buildPartial());
3808        }
3809
3810        public com.google.protobuf.Descriptors.Descriptor
3811            getDescriptorForType() {
3812          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_AclFeatureProto_descriptor;
3813        }
3814
3815        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto getDefaultInstanceForType() {
3816          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto.getDefaultInstance();
3817        }
3818
3819        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto build() {
3820          org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto result = buildPartial();
3821          if (!result.isInitialized()) {
3822            throw newUninitializedMessageException(result);
3823          }
3824          return result;
3825        }
3826
3827        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto buildPartial() {
3828          org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto result = new org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto(this);
3829          int from_bitField0_ = bitField0_;
3830          if (((bitField0_ & 0x00000001) == 0x00000001)) {
3831            entries_ = java.util.Collections.unmodifiableList(entries_);
3832            bitField0_ = (bitField0_ & ~0x00000001);
3833          }
3834          result.entries_ = entries_;
3835          onBuilt();
3836          return result;
3837        }
3838
3839        public Builder mergeFrom(com.google.protobuf.Message other) {
3840          if (other instanceof org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto) {
3841            return mergeFrom((org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto)other);
3842          } else {
3843            super.mergeFrom(other);
3844            return this;
3845          }
3846        }
3847
3848        public Builder mergeFrom(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto other) {
3849          if (other == org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto.getDefaultInstance()) return this;
3850          if (!other.entries_.isEmpty()) {
3851            if (entries_.isEmpty()) {
3852              entries_ = other.entries_;
3853              bitField0_ = (bitField0_ & ~0x00000001);
3854            } else {
3855              ensureEntriesIsMutable();
3856              entries_.addAll(other.entries_);
3857            }
3858            onChanged();
3859          }
3860          this.mergeUnknownFields(other.getUnknownFields());
3861          return this;
3862        }
3863
3864        public final boolean isInitialized() {
3865          return true;
3866        }
3867
3868        public Builder mergeFrom(
3869            com.google.protobuf.CodedInputStream input,
3870            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
3871            throws java.io.IOException {
3872          org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto parsedMessage = null;
3873          try {
3874            parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
3875          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
3876            parsedMessage = (org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto) e.getUnfinishedMessage();
3877            throw e;
3878          } finally {
3879            if (parsedMessage != null) {
3880              mergeFrom(parsedMessage);
3881            }
3882          }
3883          return this;
3884        }
3885        private int bitField0_;
3886
3887        // repeated fixed32 entries = 2 [packed = true];
3888        private java.util.List<java.lang.Integer> entries_ = java.util.Collections.emptyList();
3889        private void ensureEntriesIsMutable() {
3890          if (!((bitField0_ & 0x00000001) == 0x00000001)) {
3891            entries_ = new java.util.ArrayList<java.lang.Integer>(entries_);
3892            bitField0_ |= 0x00000001;
3893          }
3894        }
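        // ensureEntriesIsMutable implements copy-on-write: the builder may
        // share an immutable list (for example one adopted from another
        // message in mergeFrom) until the first mutation, at which point it
        // copies into a fresh ArrayList and flips bitField0_ to record
        // ownership.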
3895        /**
3896         * <code>repeated fixed32 entries = 2 [packed = true];</code>
3897         *
3898         * <pre>
3899         **
3900         * An ACL entry is represented by a 32-bit integer in Big Endian
3901         * format. The bits can be divided into five segments:
3902         * [0:2) || [2:26) || [26:27) || [27:29) || [29:32)
3903         *
3904         * [0:2) -- reserved for future uses.
3905         * [2:26) -- the name of the entry, which is an ID that points to a
3906         * string in the StringTableSection.
3907         * [26:27) -- the scope of the entry (AclEntryScopeProto)
3908         * [27:29) -- the type of the entry (AclEntryTypeProto)
3909         * [29:32) -- the permission of the entry (FsActionProto)
3910         * </pre>
3911         */
3912        public java.util.List<java.lang.Integer>
3913            getEntriesList() {
3914          return java.util.Collections.unmodifiableList(entries_);
3915        }
3916        /**
3917         * <code>repeated fixed32 entries = 2 [packed = true];</code>
3918         *
3919         * <pre>
3920         **
3921         * An ACL entry is represented by a 32-bit integer in Big Endian
3922         * format. The bits can be divided into five segments:
3923         * [0:2) || [2:26) || [26:27) || [27:29) || [29:32)
3924         *
3925         * [0:2) -- reserved for future uses.
3926         * [2:26) -- the name of the entry, which is an ID that points to a
3927         * string in the StringTableSection.
3928         * [26:27) -- the scope of the entry (AclEntryScopeProto)
3929         * [27:29) -- the type of the entry (AclEntryTypeProto)
3930         * [29:32) -- the permission of the entry (FsActionProto)
3931         * </pre>
3932         */
3933        public int getEntriesCount() {
3934          return entries_.size();
3935        }
3936        /**
3937         * <code>repeated fixed32 entries = 2 [packed = true];</code>
3938         *
3939         * <pre>
3940         **
3941         * An ACL entry is represented by a 32-bit integer in Big Endian
3942         * format. The bits can be divided into five segments:
3943         * [0:2) || [2:26) || [26:27) || [27:29) || [29:32)
3944         *
3945         * [0:2) -- reserved for future uses.
3946         * [2:26) -- the name of the entry, which is an ID that points to a
3947         * string in the StringTableSection.
3948         * [26:27) -- the scope of the entry (AclEntryScopeProto)
3949         * [27:29) -- the type of the entry (AclEntryTypeProto)
3950         * [29:32) -- the permission of the entry (FsActionProto)
3951         * </pre>
3952         */
3953        public int getEntries(int index) {
3954          return entries_.get(index);
3955        }
3956        /**
3957         * <code>repeated fixed32 entries = 2 [packed = true];</code>
3958         *
3959         * <pre>
3960         **
3961         * An ACL entry is represented by a 32-bit integer in Big Endian
3962         * format. The bits can be divided into five segments:
3963         * [0:2) || [2:26) || [26:27) || [27:29) || [29:32)
3964         *
3965         * [0:2) -- reserved for future uses.
3966         * [2:26) -- the name of the entry, which is an ID that points to a
3967         * string in the StringTableSection.
3968         * [26:27) -- the scope of the entry (AclEntryScopeProto)
3969         * [27:29) -- the type of the entry (AclEntryTypeProto)
3970         * [29:32) -- the permission of the entry (FsActionProto)
3971         * </pre>
3972         */
3973        public Builder setEntries(
3974            int index, int value) {
3975          ensureEntriesIsMutable();
3976          entries_.set(index, value);
3977          onChanged();
3978          return this;
3979        }
3980        /**
3981         * <code>repeated fixed32 entries = 2 [packed = true];</code>
3982         *
3983         * <pre>
3984         **
3985         * An ACL entry is represented by a 32-bit integer in Big Endian
3986         * format. The bits can be divided into five segments:
3987         * [0:2) || [2:26) || [26:27) || [27:29) || [29:32)
3988         *
3989         * [0:2) -- reserved for future uses.
3990         * [2:26) -- the name of the entry, which is an ID that points to a
3991         * string in the StringTableSection.
3992         * [26:27) -- the scope of the entry (AclEntryScopeProto)
3993         * [27:29) -- the type of the entry (AclEntryTypeProto)
3994         * [29:32) -- the permission of the entry (FsActionProto)
3995         * </pre>
3996         */
3997        public Builder addEntries(int value) {
3998          ensureEntriesIsMutable();
3999          entries_.add(value);
4000          onChanged();
4001          return this;
4002        }
4003        /**
4004         * <code>repeated fixed32 entries = 2 [packed = true];</code>
4005         *
4006         * <pre>
4007         **
4008         * An ACL entry is represented by a 32-bit integer in Big Endian
4009         * format. The bits can be divided into five segments:
4010         * [0:2) || [2:26) || [26:27) || [27:29) || [29:32)
4011         *
4012         * [0:2) -- reserved for future uses.
4013         * [2:26) -- the name of the entry, which is an ID that points to a
4014         * string in the StringTableSection.
4015         * [26:27) -- the scope of the entry (AclEntryScopeProto)
4016         * [27:29) -- the type of the entry (AclEntryTypeProto)
4017         * [29:32) -- the permission of the entry (FsActionProto)
4018         * </pre>
4019         */
4020        public Builder addAllEntries(
4021            java.lang.Iterable<? extends java.lang.Integer> values) {
4022          ensureEntriesIsMutable();
4023          super.addAll(values, entries_);
4024          onChanged();
4025          return this;
4026        }
4027        /**
4028         * <code>repeated fixed32 entries = 2 [packed = true];</code>
4029         *
4030         * <pre>
4031         **
4032         * An ACL entry is represented by a 32-bit integer in Big Endian
4033         * format. The bits can be divided in four segments:
4034         * [0:2) || [2:26) || [26:27) || [27:29) || [29:32)
4035         *
4036         * [0:2) -- reserved for futute uses.
4037         * [2:26) -- the name of the entry, which is an ID that points to a
4038         * string in the StringTableSection.
4039         * [26:27) -- the scope of the entry (AclEntryScopeProto)
4040         * [27:29) -- the type of the entry (AclEntryTypeProto)
4041         * [29:32) -- the permission of the entry (FsActionProto)
4042         * </pre>
4043         */
4044        public Builder clearEntries() {
4045          entries_ = java.util.Collections.emptyList();
4046          bitField0_ = (bitField0_ & ~0x00000001);
4047          onChanged();
4048          return this;
4049        }
4050
4051        // @@protoc_insertion_point(builder_scope:hadoop.hdfs.fsimage.INodeSection.AclFeatureProto)
4052      }
4053
4054      static {
4055        defaultInstance = new AclFeatureProto(true);
4056        defaultInstance.initFields();
4057      }
4058
4059      // @@protoc_insertion_point(class_scope:hadoop.hdfs.fsimage.INodeSection.AclFeatureProto)
4060    }
4061
4062    public interface XAttrCompactProtoOrBuilder
4063        extends com.google.protobuf.MessageOrBuilder {
4064
4065      // required fixed32 name = 1;
4066      /**
4067       * <code>required fixed32 name = 1;</code>
4068       *
4069       * <pre>
4070       **
4071       * The 32-bit 'name' value is divided into segments:
4072       * [0:2) -- the namespace of XAttr (XAttrNamespaceProto)
4073       * [2:26) -- the name of the entry, which is an ID that points to a
4074       * string in the StringTableSection. 
4075       * [26:32) -- reserved for future uses.
4076       * </pre>
4077       */
4078      boolean hasName();
4079      /**
4080       * <code>required fixed32 name = 1;</code>
4081       *
4082       * <pre>
4083       **
4084       * The 32-bit 'name' value is divided into segments:
4085       * [0:2) -- the namespace of XAttr (XAttrNamespaceProto)
4086       * [2:26) -- the name of the entry, which is an ID that points to a
4087       * string in the StringTableSection. 
4088       * [26:32) -- reserved for future uses.
4089       * </pre>
4090       */
4091      int getName();
4092
4093      // optional bytes value = 2;
4094      /**
4095       * <code>optional bytes value = 2;</code>
4096       */
4097      boolean hasValue();
4098      /**
4099       * <code>optional bytes value = 2;</code>
4100       */
4101      com.google.protobuf.ByteString getValue();
4102    }
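
    // A minimal sketch of unpacking the fixed32 'name' per the layout
    // documented above (the helper name and int[] return type are
    // illustrative, not part of the generated API; bits [26:32) are the
    // reserved segment):
    private static int[] decodeXAttrName(int name) {
      int namespace = (name >>> 30) & 3;               // bits [0:2), XAttrNamespaceProto
      int nameId    = (name >>> 6) & ((1 << 24) - 1);  // bits [2:26), StringTable ID
      return new int[] { namespace, nameId };
    }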
4103    /**
4104     * Protobuf type {@code hadoop.hdfs.fsimage.INodeSection.XAttrCompactProto}
4105     */
4106    public static final class XAttrCompactProto extends
4107        com.google.protobuf.GeneratedMessage
4108        implements XAttrCompactProtoOrBuilder {
4109      // Use XAttrCompactProto.newBuilder() to construct.
4110      private XAttrCompactProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
4111        super(builder);
4112        this.unknownFields = builder.getUnknownFields();
4113      }
4114      private XAttrCompactProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
4115
4116      private static final XAttrCompactProto defaultInstance;
4117      public static XAttrCompactProto getDefaultInstance() {
4118        return defaultInstance;
4119      }
4120
4121      public XAttrCompactProto getDefaultInstanceForType() {
4122        return defaultInstance;
4123      }
4124
4125      private final com.google.protobuf.UnknownFieldSet unknownFields;
4126      @java.lang.Override
4127      public final com.google.protobuf.UnknownFieldSet
4128          getUnknownFields() {
4129        return this.unknownFields;
4130      }
4131      private XAttrCompactProto(
4132          com.google.protobuf.CodedInputStream input,
4133          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
4134          throws com.google.protobuf.InvalidProtocolBufferException {
4135        initFields();
4136        int mutable_bitField0_ = 0;
4137        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
4138            com.google.protobuf.UnknownFieldSet.newBuilder();
4139        try {
4140          boolean done = false;
4141          while (!done) {
4142            int tag = input.readTag();
4143            switch (tag) {
4144              case 0:
4145                done = true;
4146                break;
4147              default: {
4148                if (!parseUnknownField(input, unknownFields,
4149                                       extensionRegistry, tag)) {
4150                  done = true;
4151                }
4152                break;
4153              }
4154              case 13: {
4155                bitField0_ |= 0x00000001;
4156                name_ = input.readFixed32();
4157                break;
4158              }
4159              case 18: {
4160                bitField0_ |= 0x00000002;
4161                value_ = input.readBytes();
4162                break;
4163              }
4164            }
4165          }
4166        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
4167          throw e.setUnfinishedMessage(this);
4168        } catch (java.io.IOException e) {
4169          throw new com.google.protobuf.InvalidProtocolBufferException(
4170              e.getMessage()).setUnfinishedMessage(this);
4171        } finally {
4172          this.unknownFields = unknownFields.build();
4173          makeExtensionsImmutable();
4174        }
4175      }
4176      public static final com.google.protobuf.Descriptors.Descriptor
4177          getDescriptor() {
4178        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_XAttrCompactProto_descriptor;
4179      }
4180
4181      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
4182          internalGetFieldAccessorTable() {
4183        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_XAttrCompactProto_fieldAccessorTable
4184            .ensureFieldAccessorsInitialized(
4185                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto.Builder.class);
4186      }
4187
4188      public static com.google.protobuf.Parser<XAttrCompactProto> PARSER =
4189          new com.google.protobuf.AbstractParser<XAttrCompactProto>() {
4190        public XAttrCompactProto parsePartialFrom(
4191            com.google.protobuf.CodedInputStream input,
4192            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
4193            throws com.google.protobuf.InvalidProtocolBufferException {
4194          return new XAttrCompactProto(input, extensionRegistry);
4195        }
4196      };
4197
4198      @java.lang.Override
4199      public com.google.protobuf.Parser<XAttrCompactProto> getParserForType() {
4200        return PARSER;
4201      }
4202
4203      private int bitField0_;
4204      // required fixed32 name = 1;
4205      public static final int NAME_FIELD_NUMBER = 1;
4206      private int name_;
4207      /**
4208       * <code>required fixed32 name = 1;</code>
4209       *
4210       * <pre>
4211       **
4212       * The 32-bit 'name' value is divided into segments:
4213       * [0:2) -- the namespace of XAttr (XAttrNamespaceProto)
4214       * [2:26) -- the name of the entry, which is an ID that points to a
4215       * string in the StringTableSection. 
4216       * [26:32) -- reserved for future uses.
       * </pre>
       */
      public boolean hasName() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>required fixed32 name = 1;</code>
       *
       * <pre>
       * [0:2) -- the namespace of XAttr (XAttrNamespaceProto)
       * [2:26) -- the name of the entry, which is an ID that points to a
       * string in the StringTableSection.
       * [26:32) -- reserved for future use.
       * </pre>
       */
      public int getName() {
        return name_;
      }
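      // Illustrative only (not generated code): unpacking the layout above.
      // `xa` is a hypothetical XAttrCompactProto instance.
      //   int packed = xa.getName();
      //   int ns = packed & 0x3;                  // bits [0:2)  -- namespace ordinal
      //   int nameId = (packed >>> 2) & 0xFFFFFF; // bits [2:26) -- string-table id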

      // optional bytes value = 2;
      public static final int VALUE_FIELD_NUMBER = 2;
      private com.google.protobuf.ByteString value_;
      /**
       * <code>optional bytes value = 2;</code>
       */
      public boolean hasValue() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>optional bytes value = 2;</code>
       */
      public com.google.protobuf.ByteString getValue() {
        return value_;
      }

      private void initFields() {
        name_ = 0;
        value_ = com.google.protobuf.ByteString.EMPTY;
      }
      private byte memoizedIsInitialized = -1;
      public final boolean isInitialized() {
        byte isInitialized = memoizedIsInitialized;
        if (isInitialized != -1) return isInitialized == 1;

        if (!hasName()) {
          memoizedIsInitialized = 0;
          return false;
        }
        memoizedIsInitialized = 1;
        return true;
      }

      public void writeTo(com.google.protobuf.CodedOutputStream output)
                          throws java.io.IOException {
        getSerializedSize();
        if (((bitField0_ & 0x00000001) == 0x00000001)) {
          output.writeFixed32(1, name_);
        }
        if (((bitField0_ & 0x00000002) == 0x00000002)) {
          output.writeBytes(2, value_);
        }
        getUnknownFields().writeTo(output);
      }

      private int memoizedSerializedSize = -1;
      public int getSerializedSize() {
        int size = memoizedSerializedSize;
        if (size != -1) return size;

        size = 0;
        if (((bitField0_ & 0x00000001) == 0x00000001)) {
          size += com.google.protobuf.CodedOutputStream
            .computeFixed32Size(1, name_);
        }
        if (((bitField0_ & 0x00000002) == 0x00000002)) {
          size += com.google.protobuf.CodedOutputStream
            .computeBytesSize(2, value_);
        }
        size += getUnknownFields().getSerializedSize();
        memoizedSerializedSize = size;
        return size;
      }

      private static final long serialVersionUID = 0L;
      @java.lang.Override
      protected java.lang.Object writeReplace()
          throws java.io.ObjectStreamException {
        return super.writeReplace();
      }

      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto parseFrom(
          com.google.protobuf.ByteString data)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto parseFrom(
          com.google.protobuf.ByteString data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto parseFrom(byte[] data)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto parseFrom(
          byte[] data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto parseFrom(java.io.InputStream input)
          throws java.io.IOException {
        return PARSER.parseFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto parseFrom(
          java.io.InputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseFrom(input, extensionRegistry);
      }
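      // The delimited variants below frame each message with a varint length
      // prefix, so multiple messages can be written back-to-back on one stream.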
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto parseDelimitedFrom(java.io.InputStream input)
          throws java.io.IOException {
        return PARSER.parseDelimitedFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto parseDelimitedFrom(
          java.io.InputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseDelimitedFrom(input, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto parseFrom(
          com.google.protobuf.CodedInputStream input)
          throws java.io.IOException {
        return PARSER.parseFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto parseFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseFrom(input, extensionRegistry);
      }

      public static Builder newBuilder() { return Builder.create(); }
      public Builder newBuilderForType() { return newBuilder(); }
      public static Builder newBuilder(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto prototype) {
        return newBuilder().mergeFrom(prototype);
      }
      public Builder toBuilder() { return newBuilder(this); }

      @java.lang.Override
      protected Builder newBuilderForType(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        Builder builder = new Builder(parent);
        return builder;
      }
      /**
       * Protobuf type {@code hadoop.hdfs.fsimage.INodeSection.XAttrCompactProto}
       */
      public static final class Builder extends
          com.google.protobuf.GeneratedMessage.Builder<Builder>
         implements org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProtoOrBuilder {
        public static final com.google.protobuf.Descriptors.Descriptor
            getDescriptor() {
          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_XAttrCompactProto_descriptor;
        }

        protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
            internalGetFieldAccessorTable() {
          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_XAttrCompactProto_fieldAccessorTable
              .ensureFieldAccessorsInitialized(
                  org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto.Builder.class);
        }

        // Construct using org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto.newBuilder()
        private Builder() {
          maybeForceBuilderInitialization();
        }

        private Builder(
            com.google.protobuf.GeneratedMessage.BuilderParent parent) {
          super(parent);
          maybeForceBuilderInitialization();
        }
        private void maybeForceBuilderInitialization() {
          if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
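            // Intentionally empty: this message has no nested-message fields,
            // so there are no field builders to pre-create.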
          }
        }
        private static Builder create() {
          return new Builder();
        }

        public Builder clear() {
          super.clear();
          name_ = 0;
          bitField0_ = (bitField0_ & ~0x00000001);
          value_ = com.google.protobuf.ByteString.EMPTY;
          bitField0_ = (bitField0_ & ~0x00000002);
          return this;
        }

        public Builder clone() {
          return create().mergeFrom(buildPartial());
        }

        public com.google.protobuf.Descriptors.Descriptor
            getDescriptorForType() {
          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_XAttrCompactProto_descriptor;
        }

        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto getDefaultInstanceForType() {
          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto.getDefaultInstance();
        }

        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto build() {
          org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto result = buildPartial();
          if (!result.isInitialized()) {
            throw newUninitializedMessageException(result);
          }
          return result;
        }

        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto buildPartial() {
          org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto result = new org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto(this);
          int from_bitField0_ = bitField0_;
          int to_bitField0_ = 0;
          if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
            to_bitField0_ |= 0x00000001;
          }
          result.name_ = name_;
          if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
            to_bitField0_ |= 0x00000002;
          }
          result.value_ = value_;
          result.bitField0_ = to_bitField0_;
          onBuilt();
          return result;
        }

        public Builder mergeFrom(com.google.protobuf.Message other) {
          if (other instanceof org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto) {
            return mergeFrom((org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto)other);
          } else {
            super.mergeFrom(other);
            return this;
          }
        }

        public Builder mergeFrom(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto other) {
          if (other == org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto.getDefaultInstance()) return this;
          if (other.hasName()) {
            setName(other.getName());
          }
          if (other.hasValue()) {
            setValue(other.getValue());
          }
          this.mergeUnknownFields(other.getUnknownFields());
          return this;
        }

        public final boolean isInitialized() {
          if (!hasName()) {

            return false;
          }
          return true;
        }

        public Builder mergeFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws java.io.IOException {
          org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto parsedMessage = null;
          try {
            parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            parsedMessage = (org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto) e.getUnfinishedMessage();
            throw e;
          } finally {
            if (parsedMessage != null) {
              mergeFrom(parsedMessage);
            }
          }
          return this;
        }
        private int bitField0_;

        // required fixed32 name = 1;
        private int name_;
        /**
         * <code>required fixed32 name = 1;</code>
         *
         * <pre>
         * [0:2) -- the namespace of XAttr (XAttrNamespaceProto)
         * [2:26) -- the name of the entry, which is an ID that points to a
         * string in the StringTableSection.
         * [26:32) -- reserved for future use.
         * </pre>
         */
        public boolean hasName() {
          return ((bitField0_ & 0x00000001) == 0x00000001);
        }
        /**
         * <code>required fixed32 name = 1;</code>
         *
         * <pre>
         * [0:2) -- the namespace of XAttr (XAttrNamespaceProto)
         * [2:26) -- the name of the entry, which is an ID that points to a
         * string in the StringTableSection.
         * [26:32) -- reserved for future use.
         * </pre>
         */
        public int getName() {
          return name_;
        }
        /**
         * <code>required fixed32 name = 1;</code>
         *
         * <pre>
         * [0:2) -- the namespace of XAttr (XAttrNamespaceProto)
         * [2:26) -- the name of the entry, which is an ID that points to a
         * string in the StringTableSection.
         * [26:32) -- reserved for future use.
         * </pre>
         */
        public Builder setName(int value) {
          bitField0_ |= 0x00000001;
          name_ = value;
          onChanged();
          return this;
        }
        /**
         * <code>required fixed32 name = 1;</code>
         *
         * <pre>
         * [0:2) -- the namespace of XAttr (XAttrNamespaceProto)
         * [2:26) -- the name of the entry, which is an ID that points to a
         * string in the StringTableSection.
         * [26:32) -- reserved for future use.
         * </pre>
         */
        public Builder clearName() {
          bitField0_ = (bitField0_ & ~0x00000001);
          name_ = 0;
          onChanged();
          return this;
        }

        // optional bytes value = 2;
        private com.google.protobuf.ByteString value_ = com.google.protobuf.ByteString.EMPTY;
        /**
         * <code>optional bytes value = 2;</code>
         */
        public boolean hasValue() {
          return ((bitField0_ & 0x00000002) == 0x00000002);
        }
        /**
         * <code>optional bytes value = 2;</code>
         */
        public com.google.protobuf.ByteString getValue() {
          return value_;
        }
        /**
         * <code>optional bytes value = 2;</code>
         */
        public Builder setValue(com.google.protobuf.ByteString value) {
          if (value == null) {
            throw new NullPointerException();
          }
          bitField0_ |= 0x00000002;
          value_ = value;
          onChanged();
          return this;
        }
        /**
         * <code>optional bytes value = 2;</code>
         */
        public Builder clearValue() {
          bitField0_ = (bitField0_ & ~0x00000002);
          value_ = getDefaultInstance().getValue();
          onChanged();
          return this;
        }

        // @@protoc_insertion_point(builder_scope:hadoop.hdfs.fsimage.INodeSection.XAttrCompactProto)
      }

      static {
        defaultInstance = new XAttrCompactProto(true);
        defaultInstance.initFields();
      }

      // @@protoc_insertion_point(class_scope:hadoop.hdfs.fsimage.INodeSection.XAttrCompactProto)
    }
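    // Illustrative usage (an assumption, not part of the generated API):
    // packing a namespace ordinal and string-table id into the fixed32 name
    // field of XAttrCompactProto, per the bit layout documented above.
    //   int packed = (nameId << 2) | nsOrdinal;   // nameId < 2^24, nsOrdinal < 4
    //   INodeSection.XAttrCompactProto xa = INodeSection.XAttrCompactProto.newBuilder()
    //       .setName(packed)
    //       .setValue(com.google.protobuf.ByteString.copyFromUtf8("v"))
    //       .build();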

    public interface XAttrFeatureProtoOrBuilder
        extends com.google.protobuf.MessageOrBuilder {

      // repeated .hadoop.hdfs.fsimage.INodeSection.XAttrCompactProto xAttrs = 1;
      /**
       * <code>repeated .hadoop.hdfs.fsimage.INodeSection.XAttrCompactProto xAttrs = 1;</code>
       */
      java.util.List<org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto>
          getXAttrsList();
      /**
       * <code>repeated .hadoop.hdfs.fsimage.INodeSection.XAttrCompactProto xAttrs = 1;</code>
       */
      org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto getXAttrs(int index);
      /**
       * <code>repeated .hadoop.hdfs.fsimage.INodeSection.XAttrCompactProto xAttrs = 1;</code>
       */
      int getXAttrsCount();
      /**
       * <code>repeated .hadoop.hdfs.fsimage.INodeSection.XAttrCompactProto xAttrs = 1;</code>
       */
      java.util.List<? extends org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProtoOrBuilder>
          getXAttrsOrBuilderList();
      /**
       * <code>repeated .hadoop.hdfs.fsimage.INodeSection.XAttrCompactProto xAttrs = 1;</code>
       */
      org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProtoOrBuilder getXAttrsOrBuilder(
          int index);
    }
    /**
     * Protobuf type {@code hadoop.hdfs.fsimage.INodeSection.XAttrFeatureProto}
     */
    public static final class XAttrFeatureProto extends
        com.google.protobuf.GeneratedMessage
        implements XAttrFeatureProtoOrBuilder {
      // Use XAttrFeatureProto.newBuilder() to construct.
      private XAttrFeatureProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
        super(builder);
        this.unknownFields = builder.getUnknownFields();
      }
      private XAttrFeatureProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

      private static final XAttrFeatureProto defaultInstance;
      public static XAttrFeatureProto getDefaultInstance() {
        return defaultInstance;
      }

      public XAttrFeatureProto getDefaultInstanceForType() {
        return defaultInstance;
      }

      private final com.google.protobuf.UnknownFieldSet unknownFields;
      @java.lang.Override
      public final com.google.protobuf.UnknownFieldSet
          getUnknownFields() {
        return this.unknownFields;
      }
      private XAttrFeatureProto(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        initFields();
        int mutable_bitField0_ = 0;
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
            com.google.protobuf.UnknownFieldSet.newBuilder();
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
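            // The only data-bearing tag here is 10 (field 1, wire type 2):
            // each xAttrs entry arrives as a length-delimited sub-message.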
            switch (tag) {
              case 0:
                done = true;
                break;
              default: {
                if (!parseUnknownField(input, unknownFields,
                                       extensionRegistry, tag)) {
                  done = true;
                }
                break;
              }
              case 10: {
                if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
                  xAttrs_ = new java.util.ArrayList<org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto>();
                  mutable_bitField0_ |= 0x00000001;
                }
                xAttrs_.add(input.readMessage(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto.PARSER, extensionRegistry));
                break;
              }
            }
          }
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(this);
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(
              e.getMessage()).setUnfinishedMessage(this);
        } finally {
          if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
            xAttrs_ = java.util.Collections.unmodifiableList(xAttrs_);
          }
          this.unknownFields = unknownFields.build();
          makeExtensionsImmutable();
        }
      }
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_XAttrFeatureProto_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_XAttrFeatureProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto.Builder.class);
      }

      public static com.google.protobuf.Parser<XAttrFeatureProto> PARSER =
          new com.google.protobuf.AbstractParser<XAttrFeatureProto>() {
        public XAttrFeatureProto parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          return new XAttrFeatureProto(input, extensionRegistry);
        }
      };

      @java.lang.Override
      public com.google.protobuf.Parser<XAttrFeatureProto> getParserForType() {
        return PARSER;
      }

      // repeated .hadoop.hdfs.fsimage.INodeSection.XAttrCompactProto xAttrs = 1;
      public static final int XATTRS_FIELD_NUMBER = 1;
      private java.util.List<org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto> xAttrs_;
      /**
       * <code>repeated .hadoop.hdfs.fsimage.INodeSection.XAttrCompactProto xAttrs = 1;</code>
       */
      public java.util.List<org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto> getXAttrsList() {
        return xAttrs_;
      }
      /**
       * <code>repeated .hadoop.hdfs.fsimage.INodeSection.XAttrCompactProto xAttrs = 1;</code>
       */
      public java.util.List<? extends org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProtoOrBuilder>
          getXAttrsOrBuilderList() {
        return xAttrs_;
      }
      /**
       * <code>repeated .hadoop.hdfs.fsimage.INodeSection.XAttrCompactProto xAttrs = 1;</code>
       */
      public int getXAttrsCount() {
        return xAttrs_.size();
      }
      /**
       * <code>repeated .hadoop.hdfs.fsimage.INodeSection.XAttrCompactProto xAttrs = 1;</code>
       */
      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto getXAttrs(int index) {
        return xAttrs_.get(index);
      }
      /**
       * <code>repeated .hadoop.hdfs.fsimage.INodeSection.XAttrCompactProto xAttrs = 1;</code>
       */
      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProtoOrBuilder getXAttrsOrBuilder(
          int index) {
        return xAttrs_.get(index);
      }

      private void initFields() {
        xAttrs_ = java.util.Collections.emptyList();
      }
      private byte memoizedIsInitialized = -1;
      public final boolean isInitialized() {
        byte isInitialized = memoizedIsInitialized;
        if (isInitialized != -1) return isInitialized == 1;

        for (int i = 0; i < getXAttrsCount(); i++) {
          if (!getXAttrs(i).isInitialized()) {
            memoizedIsInitialized = 0;
            return false;
          }
        }
        memoizedIsInitialized = 1;
        return true;
      }

      public void writeTo(com.google.protobuf.CodedOutputStream output)
                          throws java.io.IOException {
        getSerializedSize();
        for (int i = 0; i < xAttrs_.size(); i++) {
          output.writeMessage(1, xAttrs_.get(i));
        }
        getUnknownFields().writeTo(output);
      }

      private int memoizedSerializedSize = -1;
      public int getSerializedSize() {
        int size = memoizedSerializedSize;
        if (size != -1) return size;

        size = 0;
        for (int i = 0; i < xAttrs_.size(); i++) {
          size += com.google.protobuf.CodedOutputStream
            .computeMessageSize(1, xAttrs_.get(i));
        }
        size += getUnknownFields().getSerializedSize();
        memoizedSerializedSize = size;
        return size;
      }

      private static final long serialVersionUID = 0L;
      @java.lang.Override
      protected java.lang.Object writeReplace()
          throws java.io.ObjectStreamException {
        return super.writeReplace();
      }

      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto parseFrom(
          com.google.protobuf.ByteString data)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto parseFrom(
          com.google.protobuf.ByteString data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto parseFrom(byte[] data)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto parseFrom(
          byte[] data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto parseFrom(java.io.InputStream input)
          throws java.io.IOException {
        return PARSER.parseFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto parseFrom(
          java.io.InputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseFrom(input, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto parseDelimitedFrom(java.io.InputStream input)
          throws java.io.IOException {
        return PARSER.parseDelimitedFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto parseDelimitedFrom(
          java.io.InputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseDelimitedFrom(input, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto parseFrom(
          com.google.protobuf.CodedInputStream input)
          throws java.io.IOException {
        return PARSER.parseFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto parseFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseFrom(input, extensionRegistry);
      }

      public static Builder newBuilder() { return Builder.create(); }
      public Builder newBuilderForType() { return newBuilder(); }
      public static Builder newBuilder(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto prototype) {
        return newBuilder().mergeFrom(prototype);
      }
      public Builder toBuilder() { return newBuilder(this); }

      @java.lang.Override
      protected Builder newBuilderForType(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        Builder builder = new Builder(parent);
        return builder;
      }
      /**
       * Protobuf type {@code hadoop.hdfs.fsimage.INodeSection.XAttrFeatureProto}
       */
      public static final class Builder extends
          com.google.protobuf.GeneratedMessage.Builder<Builder>
         implements org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProtoOrBuilder {
        public static final com.google.protobuf.Descriptors.Descriptor
            getDescriptor() {
          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_XAttrFeatureProto_descriptor;
        }

        protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
            internalGetFieldAccessorTable() {
          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_XAttrFeatureProto_fieldAccessorTable
              .ensureFieldAccessorsInitialized(
                  org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto.Builder.class);
        }

        // Construct using org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto.newBuilder()
        private Builder() {
          maybeForceBuilderInitialization();
        }

        private Builder(
            com.google.protobuf.GeneratedMessage.BuilderParent parent) {
          super(parent);
          maybeForceBuilderInitialization();
        }
        private void maybeForceBuilderInitialization() {
          if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
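            // Pre-create the repeated-field builder so nested builders are
            // available when reflection-based access is enabled.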
            getXAttrsFieldBuilder();
          }
        }
        private static Builder create() {
          return new Builder();
        }

        public Builder clear() {
          super.clear();
          if (xAttrsBuilder_ == null) {
            xAttrs_ = java.util.Collections.emptyList();
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            xAttrsBuilder_.clear();
          }
          return this;
        }

        public Builder clone() {
          return create().mergeFrom(buildPartial());
        }

        public com.google.protobuf.Descriptors.Descriptor
            getDescriptorForType() {
          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_XAttrFeatureProto_descriptor;
        }

        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto getDefaultInstanceForType() {
          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto.getDefaultInstance();
        }

        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto build() {
          org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto result = buildPartial();
          if (!result.isInitialized()) {
            throw newUninitializedMessageException(result);
          }
          return result;
        }

        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto buildPartial() {
          org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto result = new org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto(this);
          int from_bitField0_ = bitField0_;
          if (xAttrsBuilder_ == null) {
            if (((bitField0_ & 0x00000001) == 0x00000001)) {
              xAttrs_ = java.util.Collections.unmodifiableList(xAttrs_);
              bitField0_ = (bitField0_ & ~0x00000001);
            }
            result.xAttrs_ = xAttrs_;
          } else {
            result.xAttrs_ = xAttrsBuilder_.build();
          }
          onBuilt();
          return result;
        }

        public Builder mergeFrom(com.google.protobuf.Message other) {
          if (other instanceof org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto) {
            return mergeFrom((org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto)other);
          } else {
            super.mergeFrom(other);
            return this;
          }
        }

        public Builder mergeFrom(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto other) {
          if (other == org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto.getDefaultInstance()) return this;
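          // The builder stores xAttrs either as a plain list (while
          // xAttrsBuilder_ is null) or inside a RepeatedFieldBuilder once
          // nested builders have been handed out; merge into whichever
          // representation is currently active.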
          if (xAttrsBuilder_ == null) {
            if (!other.xAttrs_.isEmpty()) {
              if (xAttrs_.isEmpty()) {
                xAttrs_ = other.xAttrs_;
                bitField0_ = (bitField0_ & ~0x00000001);
              } else {
                ensureXAttrsIsMutable();
                xAttrs_.addAll(other.xAttrs_);
              }
              onChanged();
            }
          } else {
            if (!other.xAttrs_.isEmpty()) {
              if (xAttrsBuilder_.isEmpty()) {
                xAttrsBuilder_.dispose();
                xAttrsBuilder_ = null;
                xAttrs_ = other.xAttrs_;
                bitField0_ = (bitField0_ & ~0x00000001);
                xAttrsBuilder_ =
                  com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
                     getXAttrsFieldBuilder() : null;
              } else {
                xAttrsBuilder_.addAllMessages(other.xAttrs_);
              }
            }
          }
          this.mergeUnknownFields(other.getUnknownFields());
          return this;
        }

        public final boolean isInitialized() {
          for (int i = 0; i < getXAttrsCount(); i++) {
            if (!getXAttrs(i).isInitialized()) {

              return false;
            }
          }
          return true;
        }

        public Builder mergeFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws java.io.IOException {
          org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto parsedMessage = null;
          try {
            parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            parsedMessage = (org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto) e.getUnfinishedMessage();
            throw e;
          } finally {
            if (parsedMessage != null) {
              mergeFrom(parsedMessage);
            }
          }
          return this;
        }
        private int bitField0_;

        // repeated .hadoop.hdfs.fsimage.INodeSection.XAttrCompactProto xAttrs = 1;
        private java.util.List<org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto> xAttrs_ =
          java.util.Collections.emptyList();
        private void ensureXAttrsIsMutable() {
          if (!((bitField0_ & 0x00000001) == 0x00000001)) {
            xAttrs_ = new java.util.ArrayList<org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto>(xAttrs_);
            bitField0_ |= 0x00000001;
          }
        }

        private com.google.protobuf.RepeatedFieldBuilder<
            org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProtoOrBuilder> xAttrsBuilder_;

        /**
         * <code>repeated .hadoop.hdfs.fsimage.INodeSection.XAttrCompactProto xAttrs = 1;</code>
         */
        public java.util.List<org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto> getXAttrsList() {
          if (xAttrsBuilder_ == null) {
            return java.util.Collections.unmodifiableList(xAttrs_);
          } else {
            return xAttrsBuilder_.getMessageList();
          }
        }
        /**
         * <code>repeated .hadoop.hdfs.fsimage.INodeSection.XAttrCompactProto xAttrs = 1;</code>
         */
        public int getXAttrsCount() {
          if (xAttrsBuilder_ == null) {
            return xAttrs_.size();
          } else {
            return xAttrsBuilder_.getCount();
          }
        }
        /**
         * <code>repeated .hadoop.hdfs.fsimage.INodeSection.XAttrCompactProto xAttrs = 1;</code>
         */
        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto getXAttrs(int index) {
          if (xAttrsBuilder_ == null) {
            return xAttrs_.get(index);
          } else {
            return xAttrsBuilder_.getMessage(index);
          }
        }
        /**
         * <code>repeated .hadoop.hdfs.fsimage.INodeSection.XAttrCompactProto xAttrs = 1;</code>
         */
        public Builder setXAttrs(
            int index, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto value) {
          if (xAttrsBuilder_ == null) {
            if (value == null) {
              throw new NullPointerException();
            }
            ensureXAttrsIsMutable();
            xAttrs_.set(index, value);
            onChanged();
          } else {
            xAttrsBuilder_.setMessage(index, value);
          }
          return this;
        }
        /**
         * <code>repeated .hadoop.hdfs.fsimage.INodeSection.XAttrCompactProto xAttrs = 1;</code>
         */
        public Builder setXAttrs(
            int index, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto.Builder builderForValue) {
          if (xAttrsBuilder_ == null) {
            ensureXAttrsIsMutable();
            xAttrs_.set(index, builderForValue.build());
            onChanged();
          } else {
            xAttrsBuilder_.setMessage(index, builderForValue.build());
          }
          return this;
        }
        /**
         * <code>repeated .hadoop.hdfs.fsimage.INodeSection.XAttrCompactProto xAttrs = 1;</code>
         */
        public Builder addXAttrs(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto value) {
          if (xAttrsBuilder_ == null) {
            if (value == null) {
              throw new NullPointerException();
            }
            ensureXAttrsIsMutable();
            xAttrs_.add(value);
            onChanged();
          } else {
            xAttrsBuilder_.addMessage(value);
          }
          return this;
        }
        /**
         * <code>repeated .hadoop.hdfs.fsimage.INodeSection.XAttrCompactProto xAttrs = 1;</code>
         */
        public Builder addXAttrs(
            int index, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto value) {
          if (xAttrsBuilder_ == null) {
            if (value == null) {
              throw new NullPointerException();
            }
            ensureXAttrsIsMutable();
            xAttrs_.add(index, value);
            onChanged();
          } else {
            xAttrsBuilder_.addMessage(index, value);
          }
          return this;
        }
        /**
         * <code>repeated .hadoop.hdfs.fsimage.INodeSection.XAttrCompactProto xAttrs = 1;</code>
         */
        public Builder addXAttrs(
            org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto.Builder builderForValue) {
          if (xAttrsBuilder_ == null) {
            ensureXAttrsIsMutable();
            xAttrs_.add(builderForValue.build());
            onChanged();
          } else {
            xAttrsBuilder_.addMessage(builderForValue.build());
          }
          return this;
        }
        /**
         * <code>repeated .hadoop.hdfs.fsimage.INodeSection.XAttrCompactProto xAttrs = 1;</code>
         */
        public Builder addXAttrs(
            int index, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto.Builder builderForValue) {
          if (xAttrsBuilder_ == null) {
            ensureXAttrsIsMutable();
            xAttrs_.add(index, builderForValue.build());
            onChanged();
          } else {
            xAttrsBuilder_.addMessage(index, builderForValue.build());
          }
          return this;
        }
        /**
         * <code>repeated .hadoop.hdfs.fsimage.INodeSection.XAttrCompactProto xAttrs = 1;</code>
         */
        public Builder addAllXAttrs(
            java.lang.Iterable<? extends org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto> values) {
          if (xAttrsBuilder_ == null) {
            ensureXAttrsIsMutable();
            super.addAll(values, xAttrs_);
            onChanged();
          } else {
            xAttrsBuilder_.addAllMessages(values);
          }
          return this;
        }
        /**
         * <code>repeated .hadoop.hdfs.fsimage.INodeSection.XAttrCompactProto xAttrs = 1;</code>
         */
        public Builder clearXAttrs() {
          if (xAttrsBuilder_ == null) {
            xAttrs_ = java.util.Collections.emptyList();
            bitField0_ = (bitField0_ & ~0x00000001);
            onChanged();
          } else {
            xAttrsBuilder_.clear();
          }
          return this;
        }
        /**
         * <code>repeated .hadoop.hdfs.fsimage.INodeSection.XAttrCompactProto xAttrs = 1;</code>
         */
        public Builder removeXAttrs(int index) {
          if (xAttrsBuilder_ == null) {
            ensureXAttrsIsMutable();
            xAttrs_.remove(index);
            onChanged();
          } else {
            xAttrsBuilder_.remove(index);
          }
          return this;
        }
        /**
         * <code>repeated .hadoop.hdfs.fsimage.INodeSection.XAttrCompactProto xAttrs = 1;</code>
         */
        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto.Builder getXAttrsBuilder(
            int index) {
          return getXAttrsFieldBuilder().getBuilder(index);
        }
        /**
         * <code>repeated .hadoop.hdfs.fsimage.INodeSection.XAttrCompactProto xAttrs = 1;</code>
         */
        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProtoOrBuilder getXAttrsOrBuilder(
            int index) {
          if (xAttrsBuilder_ == null) {
            return xAttrs_.get(index);
          } else {
            return xAttrsBuilder_.getMessageOrBuilder(index);
          }
        }
        /**
         * <code>repeated .hadoop.hdfs.fsimage.INodeSection.XAttrCompactProto xAttrs = 1;</code>
         */
        public java.util.List<? extends org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProtoOrBuilder>
             getXAttrsOrBuilderList() {
          if (xAttrsBuilder_ != null) {
            return xAttrsBuilder_.getMessageOrBuilderList();
          } else {
            return java.util.Collections.unmodifiableList(xAttrs_);
          }
        }
        /**
         * <code>repeated .hadoop.hdfs.fsimage.INodeSection.XAttrCompactProto xAttrs = 1;</code>
         */
        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto.Builder addXAttrsBuilder() {
          return getXAttrsFieldBuilder().addBuilder(
              org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto.getDefaultInstance());
        }
        /**
         * <code>repeated .hadoop.hdfs.fsimage.INodeSection.XAttrCompactProto xAttrs = 1;</code>
         */
        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto.Builder addXAttrsBuilder(
            int index) {
          return getXAttrsFieldBuilder().addBuilder(
              index, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto.getDefaultInstance());
        }
        /**
         * <code>repeated .hadoop.hdfs.fsimage.INodeSection.XAttrCompactProto xAttrs = 1;</code>
         */
        public java.util.List<org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto.Builder>
             getXAttrsBuilderList() {
          return getXAttrsFieldBuilder().getBuilderList();
        }
        private com.google.protobuf.RepeatedFieldBuilder<
            org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProtoOrBuilder>
            getXAttrsFieldBuilder() {
          if (xAttrsBuilder_ == null) {
            xAttrsBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProtoOrBuilder>(
                    xAttrs_,
                    ((bitField0_ & 0x00000001) == 0x00000001),
                    getParentForChildren(),
                    isClean());
            xAttrs_ = null;
          }
          return xAttrsBuilder_;
        }

        // @@protoc_insertion_point(builder_scope:hadoop.hdfs.fsimage.INodeSection.XAttrFeatureProto)
      }

      static {
        defaultInstance = new XAttrFeatureProto(true);
        defaultInstance.initFields();
      }

      // @@protoc_insertion_point(class_scope:hadoop.hdfs.fsimage.INodeSection.XAttrFeatureProto)
    }
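    // Illustrative usage (an assumption, not generated code): wrapping compact
    // entries into the feature message and serializing it.
    //   INodeSection.XAttrFeatureProto feature = INodeSection.XAttrFeatureProto.newBuilder()
    //       .addXAttrs(xa)                  // `xa` from the sketch above
    //       .build();
    //   byte[] bytes = feature.toByteArray();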

    public interface INodeFileOrBuilder
        extends com.google.protobuf.MessageOrBuilder {

      // optional uint32 replication = 1;
      /**
       * <code>optional uint32 replication = 1;</code>
       */
      boolean hasReplication();
      /**
       * <code>optional uint32 replication = 1;</code>
       */
      int getReplication();

      // optional uint64 modificationTime = 2;
      /**
       * <code>optional uint64 modificationTime = 2;</code>
       */
      boolean hasModificationTime();
      /**
       * <code>optional uint64 modificationTime = 2;</code>
       */
      long getModificationTime();

      // optional uint64 accessTime = 3;
      /**
       * <code>optional uint64 accessTime = 3;</code>
       */
      boolean hasAccessTime();
      /**
       * <code>optional uint64 accessTime = 3;</code>
       */
      long getAccessTime();

      // optional uint64 preferredBlockSize = 4;
      /**
       * <code>optional uint64 preferredBlockSize = 4;</code>
       */
      boolean hasPreferredBlockSize();
      /**
       * <code>optional uint64 preferredBlockSize = 4;</code>
       */
      long getPreferredBlockSize();

      // optional fixed64 permission = 5;
      /**
       * <code>optional fixed64 permission = 5;</code>
       */
      boolean hasPermission();
      /**
       * <code>optional fixed64 permission = 5;</code>
       */
      long getPermission();

      // repeated .hadoop.hdfs.BlockProto blocks = 6;
      /**
       * <code>repeated .hadoop.hdfs.BlockProto blocks = 6;</code>
       */
      java.util.List<org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto>
          getBlocksList();
      /**
       * <code>repeated .hadoop.hdfs.BlockProto blocks = 6;</code>
       */
      org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto getBlocks(int index);
      /**
       * <code>repeated .hadoop.hdfs.BlockProto blocks = 6;</code>
       */
      int getBlocksCount();
      /**
       * <code>repeated .hadoop.hdfs.BlockProto blocks = 6;</code>
       */
      java.util.List<? extends org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProtoOrBuilder>
          getBlocksOrBuilderList();
      /**
       * <code>repeated .hadoop.hdfs.BlockProto blocks = 6;</code>
       */
      org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProtoOrBuilder getBlocksOrBuilder(
          int index);

      // optional .hadoop.hdfs.fsimage.INodeSection.FileUnderConstructionFeature fileUC = 7;
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.FileUnderConstructionFeature fileUC = 7;</code>
       */
      boolean hasFileUC();
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.FileUnderConstructionFeature fileUC = 7;</code>
       */
      org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature getFileUC();
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.FileUnderConstructionFeature fileUC = 7;</code>
       */
      org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeatureOrBuilder getFileUCOrBuilder();

      // optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 8;
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 8;</code>
       */
      boolean hasAcl();
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 8;</code>
       */
      org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto getAcl();
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 8;</code>
       */
      org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProtoOrBuilder getAclOrBuilder();

      // optional .hadoop.hdfs.fsimage.INodeSection.XAttrFeatureProto xAttrs = 9;
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.XAttrFeatureProto xAttrs = 9;</code>
       */
      boolean hasXAttrs();
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.XAttrFeatureProto xAttrs = 9;</code>
       */
      org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto getXAttrs();
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.XAttrFeatureProto xAttrs = 9;</code>
       */
      org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProtoOrBuilder getXAttrsOrBuilder();
    }
    /**
     * Protobuf type {@code hadoop.hdfs.fsimage.INodeSection.INodeFile}
     */
    public static final class INodeFile extends
        com.google.protobuf.GeneratedMessage
        implements INodeFileOrBuilder {
      // Use INodeFile.newBuilder() to construct.
      private INodeFile(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
        super(builder);
        this.unknownFields = builder.getUnknownFields();
      }
      private INodeFile(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

      private static final INodeFile defaultInstance;
      public static INodeFile getDefaultInstance() {
        return defaultInstance;
      }

      public INodeFile getDefaultInstanceForType() {
        return defaultInstance;
      }

      private final com.google.protobuf.UnknownFieldSet unknownFields;
      @java.lang.Override
      public final com.google.protobuf.UnknownFieldSet
          getUnknownFields() {
        return this.unknownFields;
      }
      private INodeFile(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        initFields();
        int mutable_bitField0_ = 0;
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
            com.google.protobuf.UnknownFieldSet.newBuilder();
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
5468            switch (tag) {
5469              case 0:
5470                done = true;
5471                break;
5472              default: {
5473                if (!parseUnknownField(input, unknownFields,
5474                                       extensionRegistry, tag)) {
5475                  done = true;
5476                }
5477                break;
5478              }
5479              case 8: {
5480                bitField0_ |= 0x00000001;
5481                replication_ = input.readUInt32();
5482                break;
5483              }
5484              case 16: {
5485                bitField0_ |= 0x00000002;
5486                modificationTime_ = input.readUInt64();
5487                break;
5488              }
5489              case 24: {
5490                bitField0_ |= 0x00000004;
5491                accessTime_ = input.readUInt64();
5492                break;
5493              }
5494              case 32: {
5495                bitField0_ |= 0x00000008;
5496                preferredBlockSize_ = input.readUInt64();
5497                break;
5498              }
5499              case 41: {
5500                bitField0_ |= 0x00000010;
5501                permission_ = input.readFixed64();
5502                break;
5503              }
5504              case 50: {
5505                if (!((mutable_bitField0_ & 0x00000020) == 0x00000020)) {
5506                  blocks_ = new java.util.ArrayList<org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto>();
5507                  mutable_bitField0_ |= 0x00000020;
5508                }
5509                blocks_.add(input.readMessage(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto.PARSER, extensionRegistry));
5510                break;
5511              }
5512              case 58: {
5513                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature.Builder subBuilder = null;
5514                if (((bitField0_ & 0x00000020) == 0x00000020)) {
5515                  subBuilder = fileUC_.toBuilder();
5516                }
5517                fileUC_ = input.readMessage(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature.PARSER, extensionRegistry);
5518                if (subBuilder != null) {
5519                  subBuilder.mergeFrom(fileUC_);
5520                  fileUC_ = subBuilder.buildPartial();
5521                }
5522                bitField0_ |= 0x00000020;
5523                break;
5524              }
5525              case 66: {
5526                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto.Builder subBuilder = null;
5527                if (((bitField0_ & 0x00000040) == 0x00000040)) {
5528                  subBuilder = acl_.toBuilder();
5529                }
5530                acl_ = input.readMessage(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto.PARSER, extensionRegistry);
5531                if (subBuilder != null) {
5532                  subBuilder.mergeFrom(acl_);
5533                  acl_ = subBuilder.buildPartial();
5534                }
5535                bitField0_ |= 0x00000040;
5536                break;
5537              }
5538              case 74: {
5539                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto.Builder subBuilder = null;
5540                if (((bitField0_ & 0x00000080) == 0x00000080)) {
5541                  subBuilder = xAttrs_.toBuilder();
5542                }
5543                xAttrs_ = input.readMessage(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto.PARSER, extensionRegistry);
5544                if (subBuilder != null) {
5545                  subBuilder.mergeFrom(xAttrs_);
5546                  xAttrs_ = subBuilder.buildPartial();
5547                }
5548                bitField0_ |= 0x00000080;
5549                break;
5550              }
5551            }
5552          }
5553        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
5554          throw e.setUnfinishedMessage(this);
5555        } catch (java.io.IOException e) {
5556          throw new com.google.protobuf.InvalidProtocolBufferException(
5557              e.getMessage()).setUnfinishedMessage(this);
5558        } finally {
5559          if (((mutable_bitField0_ & 0x00000020) == 0x00000020)) {
5560            blocks_ = java.util.Collections.unmodifiableList(blocks_);
5561          }
5562          this.unknownFields = unknownFields.build();
5563          makeExtensionsImmutable();
5564        }
5565      }
5566      public static final com.google.protobuf.Descriptors.Descriptor
5567          getDescriptor() {
5568        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_INodeFile_descriptor;
5569      }
5570
5571      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
5572          internalGetFieldAccessorTable() {
5573        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_INodeFile_fieldAccessorTable
5574            .ensureFieldAccessorsInitialized(
5575                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile.Builder.class);
5576      }
5577
5578      public static com.google.protobuf.Parser<INodeFile> PARSER =
5579          new com.google.protobuf.AbstractParser<INodeFile>() {
5580        public INodeFile parsePartialFrom(
5581            com.google.protobuf.CodedInputStream input,
5582            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
5583            throws com.google.protobuf.InvalidProtocolBufferException {
5584          return new INodeFile(input, extensionRegistry);
5585        }
5586      };
5587
5588      @java.lang.Override
5589      public com.google.protobuf.Parser<INodeFile> getParserForType() {
5590        return PARSER;
5591      }
5592
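      // Presence bits for the optional fields: 0x01 replication,
      // 0x02 modificationTime, 0x04 accessTime, 0x08 preferredBlockSize,
      // 0x10 permission, 0x20 fileUC, 0x40 acl, 0x80 xAttrs. The repeated
      // 'blocks' field carries no presence bit on the immutable message.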
5593      private int bitField0_;
5594      // optional uint32 replication = 1;
5595      public static final int REPLICATION_FIELD_NUMBER = 1;
5596      private int replication_;
5597      /**
5598       * <code>optional uint32 replication = 1;</code>
5599       */
5600      public boolean hasReplication() {
5601        return ((bitField0_ & 0x00000001) == 0x00000001);
5602      }
5603      /**
5604       * <code>optional uint32 replication = 1;</code>
5605       */
5606      public int getReplication() {
5607        return replication_;
5608      }
5609
5610      // optional uint64 modificationTime = 2;
5611      public static final int MODIFICATIONTIME_FIELD_NUMBER = 2;
5612      private long modificationTime_;
5613      /**
5614       * <code>optional uint64 modificationTime = 2;</code>
5615       */
5616      public boolean hasModificationTime() {
5617        return ((bitField0_ & 0x00000002) == 0x00000002);
5618      }
5619      /**
5620       * <code>optional uint64 modificationTime = 2;</code>
5621       */
5622      public long getModificationTime() {
5623        return modificationTime_;
5624      }
5625
5626      // optional uint64 accessTime = 3;
5627      public static final int ACCESSTIME_FIELD_NUMBER = 3;
5628      private long accessTime_;
5629      /**
5630       * <code>optional uint64 accessTime = 3;</code>
5631       */
5632      public boolean hasAccessTime() {
5633        return ((bitField0_ & 0x00000004) == 0x00000004);
5634      }
5635      /**
5636       * <code>optional uint64 accessTime = 3;</code>
5637       */
5638      public long getAccessTime() {
5639        return accessTime_;
5640      }
5641
5642      // optional uint64 preferredBlockSize = 4;
5643      public static final int PREFERREDBLOCKSIZE_FIELD_NUMBER = 4;
5644      private long preferredBlockSize_;
5645      /**
5646       * <code>optional uint64 preferredBlockSize = 4;</code>
5647       */
5648      public boolean hasPreferredBlockSize() {
5649        return ((bitField0_ & 0x00000008) == 0x00000008);
5650      }
5651      /**
5652       * <code>optional uint64 preferredBlockSize = 4;</code>
5653       */
5654      public long getPreferredBlockSize() {
5655        return preferredBlockSize_;
5656      }
5657
5658      // optional fixed64 permission = 5;
5659      public static final int PERMISSION_FIELD_NUMBER = 5;
5660      private long permission_;
5661      /**
5662       * <code>optional fixed64 permission = 5;</code>
5663       */
5664      public boolean hasPermission() {
5665        return ((bitField0_ & 0x00000010) == 0x00000010);
5666      }
5667      /**
5668       * <code>optional fixed64 permission = 5;</code>
5669       */
5670      public long getPermission() {
5671        return permission_;
5672      }
5673
5674      // repeated .hadoop.hdfs.BlockProto blocks = 6;
5675      public static final int BLOCKS_FIELD_NUMBER = 6;
5676      private java.util.List<org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto> blocks_;
5677      /**
5678       * <code>repeated .hadoop.hdfs.BlockProto blocks = 6;</code>
5679       */
5680      public java.util.List<org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto> getBlocksList() {
5681        return blocks_;
5682      }
5683      /**
5684       * <code>repeated .hadoop.hdfs.BlockProto blocks = 6;</code>
5685       */
5686      public java.util.List<? extends org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProtoOrBuilder> 
5687          getBlocksOrBuilderList() {
5688        return blocks_;
5689      }
5690      /**
5691       * <code>repeated .hadoop.hdfs.BlockProto blocks = 6;</code>
5692       */
5693      public int getBlocksCount() {
5694        return blocks_.size();
5695      }
5696      /**
5697       * <code>repeated .hadoop.hdfs.BlockProto blocks = 6;</code>
5698       */
5699      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto getBlocks(int index) {
5700        return blocks_.get(index);
5701      }
5702      /**
5703       * <code>repeated .hadoop.hdfs.BlockProto blocks = 6;</code>
5704       */
5705      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProtoOrBuilder getBlocksOrBuilder(
5706          int index) {
5707        return blocks_.get(index);
5708      }
5709
5710      // optional .hadoop.hdfs.fsimage.INodeSection.FileUnderConstructionFeature fileUC = 7;
5711      public static final int FILEUC_FIELD_NUMBER = 7;
5712      private org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature fileUC_;
5713      /**
5714       * <code>optional .hadoop.hdfs.fsimage.INodeSection.FileUnderConstructionFeature fileUC = 7;</code>
5715       */
5716      public boolean hasFileUC() {
5717        return ((bitField0_ & 0x00000020) == 0x00000020);
5718      }
5719      /**
5720       * <code>optional .hadoop.hdfs.fsimage.INodeSection.FileUnderConstructionFeature fileUC = 7;</code>
5721       */
5722      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature getFileUC() {
5723        return fileUC_;
5724      }
5725      /**
5726       * <code>optional .hadoop.hdfs.fsimage.INodeSection.FileUnderConstructionFeature fileUC = 7;</code>
5727       */
5728      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeatureOrBuilder getFileUCOrBuilder() {
5729        return fileUC_;
5730      }
5731
5732      // optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 8;
5733      public static final int ACL_FIELD_NUMBER = 8;
5734      private org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto acl_;
5735      /**
5736       * <code>optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 8;</code>
5737       */
5738      public boolean hasAcl() {
5739        return ((bitField0_ & 0x00000040) == 0x00000040);
5740      }
5741      /**
5742       * <code>optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 8;</code>
5743       */
5744      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto getAcl() {
5745        return acl_;
5746      }
5747      /**
5748       * <code>optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 8;</code>
5749       */
5750      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProtoOrBuilder getAclOrBuilder() {
5751        return acl_;
5752      }
5753
5754      // optional .hadoop.hdfs.fsimage.INodeSection.XAttrFeatureProto xAttrs = 9;
5755      public static final int XATTRS_FIELD_NUMBER = 9;
5756      private org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto xAttrs_;
5757      /**
5758       * <code>optional .hadoop.hdfs.fsimage.INodeSection.XAttrFeatureProto xAttrs = 9;</code>
5759       */
5760      public boolean hasXAttrs() {
5761        return ((bitField0_ & 0x00000080) == 0x00000080);
5762      }
5763      /**
5764       * <code>optional .hadoop.hdfs.fsimage.INodeSection.XAttrFeatureProto xAttrs = 9;</code>
5765       */
5766      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto getXAttrs() {
5767        return xAttrs_;
5768      }
5769      /**
5770       * <code>optional .hadoop.hdfs.fsimage.INodeSection.XAttrFeatureProto xAttrs = 9;</code>
5771       */
5772      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProtoOrBuilder getXAttrsOrBuilder() {
5773        return xAttrs_;
5774      }
5775
5776      private void initFields() {
5777        replication_ = 0;
5778        modificationTime_ = 0L;
5779        accessTime_ = 0L;
5780        preferredBlockSize_ = 0L;
5781        permission_ = 0L;
5782        blocks_ = java.util.Collections.emptyList();
5783        fileUC_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature.getDefaultInstance();
5784        acl_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto.getDefaultInstance();
5785        xAttrs_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto.getDefaultInstance();
5786      }
5787      private byte memoizedIsInitialized = -1;
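      // Only 'blocks' and 'xAttrs' are validated below: they are the only
      // fields whose message types (transitively) declare required fields.
      // The verdict is cached in memoizedIsInitialized.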
5788      public final boolean isInitialized() {
5789        byte isInitialized = memoizedIsInitialized;
5790        if (isInitialized != -1) return isInitialized == 1;
5791
5792        for (int i = 0; i < getBlocksCount(); i++) {
5793          if (!getBlocks(i).isInitialized()) {
5794            memoizedIsInitialized = 0;
5795            return false;
5796          }
5797        }
5798        if (hasXAttrs()) {
5799          if (!getXAttrs().isInitialized()) {
5800            memoizedIsInitialized = 0;
5801            return false;
5802          }
5803        }
5804        memoizedIsInitialized = 1;
5805        return true;
5806      }
5807
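      // writeTo first calls getSerializedSize() so that the sizes of nested
      // messages are computed and memoized before any bytes are emitted.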
5808      public void writeTo(com.google.protobuf.CodedOutputStream output)
5809                          throws java.io.IOException {
5810        getSerializedSize();
5811        if (((bitField0_ & 0x00000001) == 0x00000001)) {
5812          output.writeUInt32(1, replication_);
5813        }
5814        if (((bitField0_ & 0x00000002) == 0x00000002)) {
5815          output.writeUInt64(2, modificationTime_);
5816        }
5817        if (((bitField0_ & 0x00000004) == 0x00000004)) {
5818          output.writeUInt64(3, accessTime_);
5819        }
5820        if (((bitField0_ & 0x00000008) == 0x00000008)) {
5821          output.writeUInt64(4, preferredBlockSize_);
5822        }
5823        if (((bitField0_ & 0x00000010) == 0x00000010)) {
5824          output.writeFixed64(5, permission_);
5825        }
5826        for (int i = 0; i < blocks_.size(); i++) {
5827          output.writeMessage(6, blocks_.get(i));
5828        }
5829        if (((bitField0_ & 0x00000020) == 0x00000020)) {
5830          output.writeMessage(7, fileUC_);
5831        }
5832        if (((bitField0_ & 0x00000040) == 0x00000040)) {
5833          output.writeMessage(8, acl_);
5834        }
5835        if (((bitField0_ & 0x00000080) == 0x00000080)) {
5836          output.writeMessage(9, xAttrs_);
5837        }
5838        getUnknownFields().writeTo(output);
5839      }
5840
5841      private int memoizedSerializedSize = -1;
5842      public int getSerializedSize() {
5843        int size = memoizedSerializedSize;
5844        if (size != -1) return size;
5845
5846        size = 0;
5847        if (((bitField0_ & 0x00000001) == 0x00000001)) {
5848          size += com.google.protobuf.CodedOutputStream
5849            .computeUInt32Size(1, replication_);
5850        }
5851        if (((bitField0_ & 0x00000002) == 0x00000002)) {
5852          size += com.google.protobuf.CodedOutputStream
5853            .computeUInt64Size(2, modificationTime_);
5854        }
5855        if (((bitField0_ & 0x00000004) == 0x00000004)) {
5856          size += com.google.protobuf.CodedOutputStream
5857            .computeUInt64Size(3, accessTime_);
5858        }
5859        if (((bitField0_ & 0x00000008) == 0x00000008)) {
5860          size += com.google.protobuf.CodedOutputStream
5861            .computeUInt64Size(4, preferredBlockSize_);
5862        }
5863        if (((bitField0_ & 0x00000010) == 0x00000010)) {
5864          size += com.google.protobuf.CodedOutputStream
5865            .computeFixed64Size(5, permission_);
5866        }
5867        for (int i = 0; i < blocks_.size(); i++) {
5868          size += com.google.protobuf.CodedOutputStream
5869            .computeMessageSize(6, blocks_.get(i));
5870        }
5871        if (((bitField0_ & 0x00000020) == 0x00000020)) {
5872          size += com.google.protobuf.CodedOutputStream
5873            .computeMessageSize(7, fileUC_);
5874        }
5875        if (((bitField0_ & 0x00000040) == 0x00000040)) {
5876          size += com.google.protobuf.CodedOutputStream
5877            .computeMessageSize(8, acl_);
5878        }
5879        if (((bitField0_ & 0x00000080) == 0x00000080)) {
5880          size += com.google.protobuf.CodedOutputStream
5881            .computeMessageSize(9, xAttrs_);
5882        }
5883        size += getUnknownFields().getSerializedSize();
5884        memoizedSerializedSize = size;
5885        return size;
5886      }
5887
5888      private static final long serialVersionUID = 0L;
5889      @java.lang.Override
5890      protected java.lang.Object writeReplace()
5891          throws java.io.ObjectStreamException {
5892        return super.writeReplace();
5893      }
5894
5895      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile parseFrom(
5896          com.google.protobuf.ByteString data)
5897          throws com.google.protobuf.InvalidProtocolBufferException {
5898        return PARSER.parseFrom(data);
5899      }
5900      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile parseFrom(
5901          com.google.protobuf.ByteString data,
5902          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
5903          throws com.google.protobuf.InvalidProtocolBufferException {
5904        return PARSER.parseFrom(data, extensionRegistry);
5905      }
5906      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile parseFrom(byte[] data)
5907          throws com.google.protobuf.InvalidProtocolBufferException {
5908        return PARSER.parseFrom(data);
5909      }
5910      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile parseFrom(
5911          byte[] data,
5912          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
5913          throws com.google.protobuf.InvalidProtocolBufferException {
5914        return PARSER.parseFrom(data, extensionRegistry);
5915      }
5916      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile parseFrom(java.io.InputStream input)
5917          throws java.io.IOException {
5918        return PARSER.parseFrom(input);
5919      }
5920      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile parseFrom(
5921          java.io.InputStream input,
5922          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
5923          throws java.io.IOException {
5924        return PARSER.parseFrom(input, extensionRegistry);
5925      }
5926      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile parseDelimitedFrom(java.io.InputStream input)
5927          throws java.io.IOException {
5928        return PARSER.parseDelimitedFrom(input);
5929      }
5930      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile parseDelimitedFrom(
5931          java.io.InputStream input,
5932          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
5933          throws java.io.IOException {
5934        return PARSER.parseDelimitedFrom(input, extensionRegistry);
5935      }
5936      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile parseFrom(
5937          com.google.protobuf.CodedInputStream input)
5938          throws java.io.IOException {
5939        return PARSER.parseFrom(input);
5940      }
5941      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile parseFrom(
5942          com.google.protobuf.CodedInputStream input,
5943          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
5944          throws java.io.IOException {
5945        return PARSER.parseFrom(input, extensionRegistry);
5946      }
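      // Illustrative round trip through the static parsers above (a sketch;
      // 'file' stands for any INodeFile instance):
      //
      //   byte[] bytes = file.toByteArray();
      //   FsImageProto.INodeSection.INodeFile copy =
      //       FsImageProto.INodeSection.INodeFile.parseFrom(bytes);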
5947
5948      public static Builder newBuilder() { return Builder.create(); }
5949      public Builder newBuilderForType() { return newBuilder(); }
5950      public static Builder newBuilder(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile prototype) {
5951        return newBuilder().mergeFrom(prototype);
5952      }
5953      public Builder toBuilder() { return newBuilder(this); }
5954
5955      @java.lang.Override
5956      protected Builder newBuilderForType(
5957          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
5958        Builder builder = new Builder(parent);
5959        return builder;
5960      }
5961      /**
5962       * Protobuf type {@code hadoop.hdfs.fsimage.INodeSection.INodeFile}
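       *
       * <pre>
       * A sketch of typical use (field values are hypothetical); builders
       * are not thread-safe:
       *
       *   INodeFile edited = original.toBuilder()
       *       .setReplication(2)
       *       .clearAccessTime()
       *       .build();
       * </pre>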
5963       */
5964      public static final class Builder extends
5965          com.google.protobuf.GeneratedMessage.Builder<Builder>
5966         implements org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFileOrBuilder {
5967        public static final com.google.protobuf.Descriptors.Descriptor
5968            getDescriptor() {
5969          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_INodeFile_descriptor;
5970        }
5971
5972        protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
5973            internalGetFieldAccessorTable() {
5974          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_INodeFile_fieldAccessorTable
5975              .ensureFieldAccessorsInitialized(
5976                  org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile.Builder.class);
5977        }
5978
5979        // Construct using org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile.newBuilder()
5980        private Builder() {
5981          maybeForceBuilderInitialization();
5982        }
5983
5984        private Builder(
5985            com.google.protobuf.GeneratedMessage.BuilderParent parent) {
5986          super(parent);
5987          maybeForceBuilderInitialization();
5988        }
5989        private void maybeForceBuilderInitialization() {
5990          if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
5991            getBlocksFieldBuilder();
5992            getFileUCFieldBuilder();
5993            getAclFieldBuilder();
5994            getXAttrsFieldBuilder();
5995          }
5996        }
5997        private static Builder create() {
5998          return new Builder();
5999        }
6000
6001        public Builder clear() {
6002          super.clear();
6003          replication_ = 0;
6004          bitField0_ = (bitField0_ & ~0x00000001);
6005          modificationTime_ = 0L;
6006          bitField0_ = (bitField0_ & ~0x00000002);
6007          accessTime_ = 0L;
6008          bitField0_ = (bitField0_ & ~0x00000004);
6009          preferredBlockSize_ = 0L;
6010          bitField0_ = (bitField0_ & ~0x00000008);
6011          permission_ = 0L;
6012          bitField0_ = (bitField0_ & ~0x00000010);
6013          if (blocksBuilder_ == null) {
6014            blocks_ = java.util.Collections.emptyList();
6015            bitField0_ = (bitField0_ & ~0x00000020);
6016          } else {
6017            blocksBuilder_.clear();
6018          }
6019          if (fileUCBuilder_ == null) {
6020            fileUC_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature.getDefaultInstance();
6021          } else {
6022            fileUCBuilder_.clear();
6023          }
6024          bitField0_ = (bitField0_ & ~0x00000040);
6025          if (aclBuilder_ == null) {
6026            acl_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto.getDefaultInstance();
6027          } else {
6028            aclBuilder_.clear();
6029          }
6030          bitField0_ = (bitField0_ & ~0x00000080);
6031          if (xAttrsBuilder_ == null) {
6032            xAttrs_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto.getDefaultInstance();
6033          } else {
6034            xAttrsBuilder_.clear();
6035          }
6036          bitField0_ = (bitField0_ & ~0x00000100);
6037          return this;
6038        }
6039
6040        public Builder clone() {
6041          return create().mergeFrom(buildPartial());
6042        }
6043
6044        public com.google.protobuf.Descriptors.Descriptor
6045            getDescriptorForType() {
6046          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_INodeFile_descriptor;
6047        }
6048
6049        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile getDefaultInstanceForType() {
6050          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile.getDefaultInstance();
6051        }
6052
6053        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile build() {
6054          org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile result = buildPartial();
6055          if (!result.isInitialized()) {
6056            throw newUninitializedMessageException(result);
6057          }
6058          return result;
6059        }
6060
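        // buildPartial copies builder state into a fresh message, remapping
        // presence bits: the builder reserves 0x20 for the repeated 'blocks'
        // list, so builder bits 0x40/0x80/0x100 (fileUC/acl/xAttrs) shift
        // down to message bits 0x20/0x40/0x80.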
6061        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile buildPartial() {
6062          org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile result = new org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile(this);
6063          int from_bitField0_ = bitField0_;
6064          int to_bitField0_ = 0;
6065          if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
6066            to_bitField0_ |= 0x00000001;
6067          }
6068          result.replication_ = replication_;
6069          if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
6070            to_bitField0_ |= 0x00000002;
6071          }
6072          result.modificationTime_ = modificationTime_;
6073          if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
6074            to_bitField0_ |= 0x00000004;
6075          }
6076          result.accessTime_ = accessTime_;
6077          if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
6078            to_bitField0_ |= 0x00000008;
6079          }
6080          result.preferredBlockSize_ = preferredBlockSize_;
6081          if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
6082            to_bitField0_ |= 0x00000010;
6083          }
6084          result.permission_ = permission_;
6085          if (blocksBuilder_ == null) {
6086            if (((bitField0_ & 0x00000020) == 0x00000020)) {
6087              blocks_ = java.util.Collections.unmodifiableList(blocks_);
6088              bitField0_ = (bitField0_ & ~0x00000020);
6089            }
6090            result.blocks_ = blocks_;
6091          } else {
6092            result.blocks_ = blocksBuilder_.build();
6093          }
6094          if (((from_bitField0_ & 0x00000040) == 0x00000040)) {
6095            to_bitField0_ |= 0x00000020;
6096          }
6097          if (fileUCBuilder_ == null) {
6098            result.fileUC_ = fileUC_;
6099          } else {
6100            result.fileUC_ = fileUCBuilder_.build();
6101          }
6102          if (((from_bitField0_ & 0x00000080) == 0x00000080)) {
6103            to_bitField0_ |= 0x00000040;
6104          }
6105          if (aclBuilder_ == null) {
6106            result.acl_ = acl_;
6107          } else {
6108            result.acl_ = aclBuilder_.build();
6109          }
6110          if (((from_bitField0_ & 0x00000100) == 0x00000100)) {
6111            to_bitField0_ |= 0x00000080;
6112          }
6113          if (xAttrsBuilder_ == null) {
6114            result.xAttrs_ = xAttrs_;
6115          } else {
6116            result.xAttrs_ = xAttrsBuilder_.build();
6117          }
6118          result.bitField0_ = to_bitField0_;
6119          onBuilt();
6120          return result;
6121        }
6122
6123        public Builder mergeFrom(com.google.protobuf.Message other) {
6124          if (other instanceof org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile) {
6125            return mergeFrom((org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile)other);
6126          } else {
6127            super.mergeFrom(other);
6128            return this;
6129          }
6130        }
6131
6132        public Builder mergeFrom(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile other) {
6133          if (other == org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile.getDefaultInstance()) return this;
6134          if (other.hasReplication()) {
6135            setReplication(other.getReplication());
6136          }
6137          if (other.hasModificationTime()) {
6138            setModificationTime(other.getModificationTime());
6139          }
6140          if (other.hasAccessTime()) {
6141            setAccessTime(other.getAccessTime());
6142          }
6143          if (other.hasPreferredBlockSize()) {
6144            setPreferredBlockSize(other.getPreferredBlockSize());
6145          }
6146          if (other.hasPermission()) {
6147            setPermission(other.getPermission());
6148          }
6149          if (blocksBuilder_ == null) {
6150            if (!other.blocks_.isEmpty()) {
6151              if (blocks_.isEmpty()) {
6152                blocks_ = other.blocks_;
6153                bitField0_ = (bitField0_ & ~0x00000020);
6154              } else {
6155                ensureBlocksIsMutable();
6156                blocks_.addAll(other.blocks_);
6157              }
6158              onChanged();
6159            }
6160          } else {
6161            if (!other.blocks_.isEmpty()) {
6162              if (blocksBuilder_.isEmpty()) {
6163                blocksBuilder_.dispose();
6164                blocksBuilder_ = null;
6165                blocks_ = other.blocks_;
6166                bitField0_ = (bitField0_ & ~0x00000020);
6167                blocksBuilder_ = 
6168                  com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
6169                     getBlocksFieldBuilder() : null;
6170              } else {
6171                blocksBuilder_.addAllMessages(other.blocks_);
6172              }
6173            }
6174          }
6175          if (other.hasFileUC()) {
6176            mergeFileUC(other.getFileUC());
6177          }
6178          if (other.hasAcl()) {
6179            mergeAcl(other.getAcl());
6180          }
6181          if (other.hasXAttrs()) {
6182            mergeXAttrs(other.getXAttrs());
6183          }
6184          this.mergeUnknownFields(other.getUnknownFields());
6185          return this;
6186        }
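        // Merge semantics above: scalar fields set in 'other' overwrite this
        // builder's values, repeated 'blocks' entries are appended, and the
        // singular message fields (fileUC, acl, xAttrs) are merged field by
        // field rather than replaced wholesale.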
6187
        public final boolean isInitialized() {
          for (int i = 0; i < getBlocksCount(); i++) {
            if (!getBlocks(i).isInitialized()) {
              return false;
            }
          }
          if (hasXAttrs()) {
            if (!getXAttrs().isInitialized()) {
              return false;
            }
          }
          return true;
        }
6203
6204        public Builder mergeFrom(
6205            com.google.protobuf.CodedInputStream input,
6206            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
6207            throws java.io.IOException {
6208          org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile parsedMessage = null;
6209          try {
6210            parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
6211          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
6212            parsedMessage = (org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile) e.getUnfinishedMessage();
6213            throw e;
6214          } finally {
6215            if (parsedMessage != null) {
6216              mergeFrom(parsedMessage);
6217            }
6218          }
6219          return this;
6220        }
6221        private int bitField0_;
6222
6223        // optional uint32 replication = 1;
6224        private int replication_ ;
6225        /**
6226         * <code>optional uint32 replication = 1;</code>
6227         */
6228        public boolean hasReplication() {
6229          return ((bitField0_ & 0x00000001) == 0x00000001);
6230        }
6231        /**
6232         * <code>optional uint32 replication = 1;</code>
6233         */
6234        public int getReplication() {
6235          return replication_;
6236        }
6237        /**
6238         * <code>optional uint32 replication = 1;</code>
6239         */
6240        public Builder setReplication(int value) {
6241          bitField0_ |= 0x00000001;
6242          replication_ = value;
6243          onChanged();
6244          return this;
6245        }
6246        /**
6247         * <code>optional uint32 replication = 1;</code>
6248         */
6249        public Builder clearReplication() {
6250          bitField0_ = (bitField0_ & ~0x00000001);
6251          replication_ = 0;
6252          onChanged();
6253          return this;
6254        }
6255
6256        // optional uint64 modificationTime = 2;
6257        private long modificationTime_ ;
6258        /**
6259         * <code>optional uint64 modificationTime = 2;</code>
6260         */
6261        public boolean hasModificationTime() {
6262          return ((bitField0_ & 0x00000002) == 0x00000002);
6263        }
6264        /**
6265         * <code>optional uint64 modificationTime = 2;</code>
6266         */
6267        public long getModificationTime() {
6268          return modificationTime_;
6269        }
6270        /**
6271         * <code>optional uint64 modificationTime = 2;</code>
6272         */
6273        public Builder setModificationTime(long value) {
6274          bitField0_ |= 0x00000002;
6275          modificationTime_ = value;
6276          onChanged();
6277          return this;
6278        }
6279        /**
6280         * <code>optional uint64 modificationTime = 2;</code>
6281         */
6282        public Builder clearModificationTime() {
6283          bitField0_ = (bitField0_ & ~0x00000002);
6284          modificationTime_ = 0L;
6285          onChanged();
6286          return this;
6287        }
6288
6289        // optional uint64 accessTime = 3;
6290        private long accessTime_ ;
6291        /**
6292         * <code>optional uint64 accessTime = 3;</code>
6293         */
6294        public boolean hasAccessTime() {
6295          return ((bitField0_ & 0x00000004) == 0x00000004);
6296        }
6297        /**
6298         * <code>optional uint64 accessTime = 3;</code>
6299         */
6300        public long getAccessTime() {
6301          return accessTime_;
6302        }
6303        /**
6304         * <code>optional uint64 accessTime = 3;</code>
6305         */
6306        public Builder setAccessTime(long value) {
6307          bitField0_ |= 0x00000004;
6308          accessTime_ = value;
6309          onChanged();
6310          return this;
6311        }
6312        /**
6313         * <code>optional uint64 accessTime = 3;</code>
6314         */
6315        public Builder clearAccessTime() {
6316          bitField0_ = (bitField0_ & ~0x00000004);
6317          accessTime_ = 0L;
6318          onChanged();
6319          return this;
6320        }
6321
6322        // optional uint64 preferredBlockSize = 4;
6323        private long preferredBlockSize_ ;
6324        /**
6325         * <code>optional uint64 preferredBlockSize = 4;</code>
6326         */
6327        public boolean hasPreferredBlockSize() {
6328          return ((bitField0_ & 0x00000008) == 0x00000008);
6329        }
6330        /**
6331         * <code>optional uint64 preferredBlockSize = 4;</code>
6332         */
6333        public long getPreferredBlockSize() {
6334          return preferredBlockSize_;
6335        }
6336        /**
6337         * <code>optional uint64 preferredBlockSize = 4;</code>
6338         */
6339        public Builder setPreferredBlockSize(long value) {
6340          bitField0_ |= 0x00000008;
6341          preferredBlockSize_ = value;
6342          onChanged();
6343          return this;
6344        }
6345        /**
6346         * <code>optional uint64 preferredBlockSize = 4;</code>
6347         */
6348        public Builder clearPreferredBlockSize() {
6349          bitField0_ = (bitField0_ & ~0x00000008);
6350          preferredBlockSize_ = 0L;
6351          onChanged();
6352          return this;
6353        }
6354
6355        // optional fixed64 permission = 5;
6356        private long permission_ ;
6357        /**
6358         * <code>optional fixed64 permission = 5;</code>
6359         */
6360        public boolean hasPermission() {
6361          return ((bitField0_ & 0x00000010) == 0x00000010);
6362        }
6363        /**
6364         * <code>optional fixed64 permission = 5;</code>
6365         */
6366        public long getPermission() {
6367          return permission_;
6368        }
6369        /**
6370         * <code>optional fixed64 permission = 5;</code>
6371         */
6372        public Builder setPermission(long value) {
6373          bitField0_ |= 0x00000010;
6374          permission_ = value;
6375          onChanged();
6376          return this;
6377        }
6378        /**
6379         * <code>optional fixed64 permission = 5;</code>
6380         */
6381        public Builder clearPermission() {
6382          bitField0_ = (bitField0_ & ~0x00000010);
6383          permission_ = 0L;
6384          onChanged();
6385          return this;
6386        }
6387
6388        // repeated .hadoop.hdfs.BlockProto blocks = 6;
6389        private java.util.List<org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto> blocks_ =
6390          java.util.Collections.emptyList();
        // Copy-on-write: replace the shared (possibly immutable) list with a
        // private ArrayList the first time the builder mutates 'blocks'.
        private void ensureBlocksIsMutable() {
          if (!((bitField0_ & 0x00000020) == 0x00000020)) {
            blocks_ = new java.util.ArrayList<org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto>(blocks_);
            bitField0_ |= 0x00000020;
          }
        }
6397
6398        private com.google.protobuf.RepeatedFieldBuilder<
6399            org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProtoOrBuilder> blocksBuilder_;
6400
6401        /**
6402         * <code>repeated .hadoop.hdfs.BlockProto blocks = 6;</code>
6403         */
6404        public java.util.List<org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto> getBlocksList() {
6405          if (blocksBuilder_ == null) {
6406            return java.util.Collections.unmodifiableList(blocks_);
6407          } else {
6408            return blocksBuilder_.getMessageList();
6409          }
6410        }
6411        /**
6412         * <code>repeated .hadoop.hdfs.BlockProto blocks = 6;</code>
6413         */
6414        public int getBlocksCount() {
6415          if (blocksBuilder_ == null) {
6416            return blocks_.size();
6417          } else {
6418            return blocksBuilder_.getCount();
6419          }
6420        }
6421        /**
6422         * <code>repeated .hadoop.hdfs.BlockProto blocks = 6;</code>
6423         */
6424        public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto getBlocks(int index) {
6425          if (blocksBuilder_ == null) {
6426            return blocks_.get(index);
6427          } else {
6428            return blocksBuilder_.getMessage(index);
6429          }
6430        }
6431        /**
6432         * <code>repeated .hadoop.hdfs.BlockProto blocks = 6;</code>
6433         */
6434        public Builder setBlocks(
6435            int index, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto value) {
6436          if (blocksBuilder_ == null) {
6437            if (value == null) {
6438              throw new NullPointerException();
6439            }
6440            ensureBlocksIsMutable();
6441            blocks_.set(index, value);
6442            onChanged();
6443          } else {
6444            blocksBuilder_.setMessage(index, value);
6445          }
6446          return this;
6447        }
6448        /**
6449         * <code>repeated .hadoop.hdfs.BlockProto blocks = 6;</code>
6450         */
6451        public Builder setBlocks(
6452            int index, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto.Builder builderForValue) {
6453          if (blocksBuilder_ == null) {
6454            ensureBlocksIsMutable();
6455            blocks_.set(index, builderForValue.build());
6456            onChanged();
6457          } else {
6458            blocksBuilder_.setMessage(index, builderForValue.build());
6459          }
6460          return this;
6461        }
6462        /**
6463         * <code>repeated .hadoop.hdfs.BlockProto blocks = 6;</code>
6464         */
6465        public Builder addBlocks(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto value) {
6466          if (blocksBuilder_ == null) {
6467            if (value == null) {
6468              throw new NullPointerException();
6469            }
6470            ensureBlocksIsMutable();
6471            blocks_.add(value);
6472            onChanged();
6473          } else {
6474            blocksBuilder_.addMessage(value);
6475          }
6476          return this;
6477        }
6478        /**
6479         * <code>repeated .hadoop.hdfs.BlockProto blocks = 6;</code>
6480         */
6481        public Builder addBlocks(
6482            int index, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto value) {
6483          if (blocksBuilder_ == null) {
6484            if (value == null) {
6485              throw new NullPointerException();
6486            }
6487            ensureBlocksIsMutable();
6488            blocks_.add(index, value);
6489            onChanged();
6490          } else {
6491            blocksBuilder_.addMessage(index, value);
6492          }
6493          return this;
6494        }
6495        /**
6496         * <code>repeated .hadoop.hdfs.BlockProto blocks = 6;</code>
6497         */
6498        public Builder addBlocks(
6499            org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto.Builder builderForValue) {
6500          if (blocksBuilder_ == null) {
6501            ensureBlocksIsMutable();
6502            blocks_.add(builderForValue.build());
6503            onChanged();
6504          } else {
6505            blocksBuilder_.addMessage(builderForValue.build());
6506          }
6507          return this;
6508        }
6509        /**
6510         * <code>repeated .hadoop.hdfs.BlockProto blocks = 6;</code>
6511         */
6512        public Builder addBlocks(
6513            int index, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto.Builder builderForValue) {
6514          if (blocksBuilder_ == null) {
6515            ensureBlocksIsMutable();
6516            blocks_.add(index, builderForValue.build());
6517            onChanged();
6518          } else {
6519            blocksBuilder_.addMessage(index, builderForValue.build());
6520          }
6521          return this;
6522        }
6523        /**
6524         * <code>repeated .hadoop.hdfs.BlockProto blocks = 6;</code>
6525         */
6526        public Builder addAllBlocks(
6527            java.lang.Iterable<? extends org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto> values) {
6528          if (blocksBuilder_ == null) {
6529            ensureBlocksIsMutable();
6530            super.addAll(values, blocks_);
6531            onChanged();
6532          } else {
6533            blocksBuilder_.addAllMessages(values);
6534          }
6535          return this;
6536        }
6537        /**
6538         * <code>repeated .hadoop.hdfs.BlockProto blocks = 6;</code>
6539         */
6540        public Builder clearBlocks() {
6541          if (blocksBuilder_ == null) {
6542            blocks_ = java.util.Collections.emptyList();
6543            bitField0_ = (bitField0_ & ~0x00000020);
6544            onChanged();
6545          } else {
6546            blocksBuilder_.clear();
6547          }
6548          return this;
6549        }
6550        /**
6551         * <code>repeated .hadoop.hdfs.BlockProto blocks = 6;</code>
6552         */
6553        public Builder removeBlocks(int index) {
6554          if (blocksBuilder_ == null) {
6555            ensureBlocksIsMutable();
6556            blocks_.remove(index);
6557            onChanged();
6558          } else {
6559            blocksBuilder_.remove(index);
6560          }
6561          return this;
6562        }
6563        /**
6564         * <code>repeated .hadoop.hdfs.BlockProto blocks = 6;</code>
6565         */
6566        public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto.Builder getBlocksBuilder(
6567            int index) {
6568          return getBlocksFieldBuilder().getBuilder(index);
6569        }
6570        /**
6571         * <code>repeated .hadoop.hdfs.BlockProto blocks = 6;</code>
6572         */
        public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProtoOrBuilder getBlocksOrBuilder(
            int index) {
          if (blocksBuilder_ == null) {
            return blocks_.get(index);
          } else {
            return blocksBuilder_.getMessageOrBuilder(index);
          }
        }
6580        /**
6581         * <code>repeated .hadoop.hdfs.BlockProto blocks = 6;</code>
6582         */
6583        public java.util.List<? extends org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProtoOrBuilder> 
6584             getBlocksOrBuilderList() {
6585          if (blocksBuilder_ != null) {
6586            return blocksBuilder_.getMessageOrBuilderList();
6587          } else {
6588            return java.util.Collections.unmodifiableList(blocks_);
6589          }
6590        }
6591        /**
6592         * <code>repeated .hadoop.hdfs.BlockProto blocks = 6;</code>
6593         */
6594        public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto.Builder addBlocksBuilder() {
6595          return getBlocksFieldBuilder().addBuilder(
6596              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto.getDefaultInstance());
6597        }
6598        /**
6599         * <code>repeated .hadoop.hdfs.BlockProto blocks = 6;</code>
6600         */
6601        public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto.Builder addBlocksBuilder(
6602            int index) {
6603          return getBlocksFieldBuilder().addBuilder(
6604              index, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto.getDefaultInstance());
6605        }
6606        /**
6607         * <code>repeated .hadoop.hdfs.BlockProto blocks = 6;</code>
6608         */
6609        public java.util.List<org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto.Builder> 
6610             getBlocksBuilderList() {
6611          return getBlocksFieldBuilder().getBuilderList();
6612        }
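        // Lazily creates the RepeatedFieldBuilder; once created it owns the
        // element list (blocks_ is nulled) and all subsequent reads and
        // writes go through blocksBuilder_.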
6613        private com.google.protobuf.RepeatedFieldBuilder<
6614            org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProtoOrBuilder> 
6615            getBlocksFieldBuilder() {
6616          if (blocksBuilder_ == null) {
6617            blocksBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
6618                org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProtoOrBuilder>(
6619                    blocks_,
6620                    ((bitField0_ & 0x00000020) == 0x00000020),
6621                    getParentForChildren(),
6622                    isClean());
6623            blocks_ = null;
6624          }
6625          return blocksBuilder_;
6626        }
6627
6628        // optional .hadoop.hdfs.fsimage.INodeSection.FileUnderConstructionFeature fileUC = 7;
6629        private org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature fileUC_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature.getDefaultInstance();
6630        private com.google.protobuf.SingleFieldBuilder<
6631            org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeatureOrBuilder> fileUCBuilder_;
6632        /**
6633         * <code>optional .hadoop.hdfs.fsimage.INodeSection.FileUnderConstructionFeature fileUC = 7;</code>
6634         */
6635        public boolean hasFileUC() {
6636          return ((bitField0_ & 0x00000040) == 0x00000040);
6637        }
6638        /**
6639         * <code>optional .hadoop.hdfs.fsimage.INodeSection.FileUnderConstructionFeature fileUC = 7;</code>
6640         */
6641        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature getFileUC() {
6642          if (fileUCBuilder_ == null) {
6643            return fileUC_;
6644          } else {
6645            return fileUCBuilder_.getMessage();
6646          }
6647        }
6648        /**
6649         * <code>optional .hadoop.hdfs.fsimage.INodeSection.FileUnderConstructionFeature fileUC = 7;</code>
6650         */
6651        public Builder setFileUC(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature value) {
6652          if (fileUCBuilder_ == null) {
6653            if (value == null) {
6654              throw new NullPointerException();
6655            }
6656            fileUC_ = value;
6657            onChanged();
6658          } else {
6659            fileUCBuilder_.setMessage(value);
6660          }
6661          bitField0_ |= 0x00000040;
6662          return this;
6663        }
6664        /**
6665         * <code>optional .hadoop.hdfs.fsimage.INodeSection.FileUnderConstructionFeature fileUC = 7;</code>
6666         */
6667        public Builder setFileUC(
6668            org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature.Builder builderForValue) {
6669          if (fileUCBuilder_ == null) {
6670            fileUC_ = builderForValue.build();
6671            onChanged();
6672          } else {
6673            fileUCBuilder_.setMessage(builderForValue.build());
6674          }
6675          bitField0_ |= 0x00000040;
6676          return this;
6677        }
6678        /**
6679         * <code>optional .hadoop.hdfs.fsimage.INodeSection.FileUnderConstructionFeature fileUC = 7;</code>
6680         */
6681        public Builder mergeFileUC(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature value) {
6682          if (fileUCBuilder_ == null) {
6683            if (((bitField0_ & 0x00000040) == 0x00000040) &&
6684                fileUC_ != org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature.getDefaultInstance()) {
6685              fileUC_ =
6686                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature.newBuilder(fileUC_).mergeFrom(value).buildPartial();
6687            } else {
6688              fileUC_ = value;
6689            }
6690            onChanged();
6691          } else {
6692            fileUCBuilder_.mergeFrom(value);
6693          }
6694          bitField0_ |= 0x00000040;
6695          return this;
6696        }
6697        /**
6698         * <code>optional .hadoop.hdfs.fsimage.INodeSection.FileUnderConstructionFeature fileUC = 7;</code>
6699         */
6700        public Builder clearFileUC() {
6701          if (fileUCBuilder_ == null) {
6702            fileUC_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature.getDefaultInstance();
6703            onChanged();
6704          } else {
6705            fileUCBuilder_.clear();
6706          }
6707          bitField0_ = (bitField0_ & ~0x00000040);
6708          return this;
6709        }
6710        /**
6711         * <code>optional .hadoop.hdfs.fsimage.INodeSection.FileUnderConstructionFeature fileUC = 7;</code>
6712         */
6713        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature.Builder getFileUCBuilder() {
6714          bitField0_ |= 0x00000040;
6715          onChanged();
6716          return getFileUCFieldBuilder().getBuilder();
6717        }
6718        /**
6719         * <code>optional .hadoop.hdfs.fsimage.INodeSection.FileUnderConstructionFeature fileUC = 7;</code>
6720         */
6721        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeatureOrBuilder getFileUCOrBuilder() {
6722          if (fileUCBuilder_ != null) {
6723            return fileUCBuilder_.getMessageOrBuilder();
6724          } else {
6725            return fileUC_;
6726          }
6727        }
6728        /**
6729         * <code>optional .hadoop.hdfs.fsimage.INodeSection.FileUnderConstructionFeature fileUC = 7;</code>
6730         */
6731        private com.google.protobuf.SingleFieldBuilder<
6732            org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeatureOrBuilder> 
6733            getFileUCFieldBuilder() {
6734          if (fileUCBuilder_ == null) {
6735            fileUCBuilder_ = new com.google.protobuf.SingleFieldBuilder<
6736                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeature.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.FileUnderConstructionFeatureOrBuilder>(
6737                    fileUC_,
6738                    getParentForChildren(),
6739                    isClean());
6740            fileUC_ = null;
6741          }
6742          return fileUCBuilder_;
6743        }
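        // As with 'blocks', the SingleFieldBuilder takes ownership on first
        // use (fileUC_ is nulled); the same pattern repeats below for acl
        // and xAttrs.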

        // optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 8;
        private org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto acl_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto.getDefaultInstance();
        private com.google.protobuf.SingleFieldBuilder<
            org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProtoOrBuilder> aclBuilder_;
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 8;</code>
         */
        public boolean hasAcl() {
          return ((bitField0_ & 0x00000080) == 0x00000080);
        }
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 8;</code>
         */
        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto getAcl() {
          if (aclBuilder_ == null) {
            return acl_;
          } else {
            return aclBuilder_.getMessage();
          }
        }
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 8;</code>
         */
        public Builder setAcl(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto value) {
          if (aclBuilder_ == null) {
            if (value == null) {
              throw new NullPointerException();
            }
            acl_ = value;
            onChanged();
          } else {
            aclBuilder_.setMessage(value);
          }
          bitField0_ |= 0x00000080;
          return this;
        }
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 8;</code>
         */
        public Builder setAcl(
            org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto.Builder builderForValue) {
          if (aclBuilder_ == null) {
            acl_ = builderForValue.build();
            onChanged();
          } else {
            aclBuilder_.setMessage(builderForValue.build());
          }
          bitField0_ |= 0x00000080;
          return this;
        }
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 8;</code>
         */
        public Builder mergeAcl(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto value) {
          if (aclBuilder_ == null) {
            if (((bitField0_ & 0x00000080) == 0x00000080) &&
                acl_ != org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto.getDefaultInstance()) {
              acl_ =
                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto.newBuilder(acl_).mergeFrom(value).buildPartial();
            } else {
              acl_ = value;
            }
            onChanged();
          } else {
            aclBuilder_.mergeFrom(value);
          }
          bitField0_ |= 0x00000080;
          return this;
        }
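        // mergeAcl() above follows the usual proto2 rule for optional message
        // fields: if the field is already set to something other than the
        // default instance, the incoming value is merged into the existing
        // message field by field; otherwise the incoming value replaces it
        // outright.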
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 8;</code>
         */
        public Builder clearAcl() {
          if (aclBuilder_ == null) {
            acl_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto.getDefaultInstance();
            onChanged();
          } else {
            aclBuilder_.clear();
          }
          bitField0_ = (bitField0_ & ~0x00000080);
          return this;
        }
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 8;</code>
         */
        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto.Builder getAclBuilder() {
          bitField0_ |= 0x00000080;
          onChanged();
          return getAclFieldBuilder().getBuilder();
        }
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 8;</code>
         */
        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProtoOrBuilder getAclOrBuilder() {
          if (aclBuilder_ != null) {
            return aclBuilder_.getMessageOrBuilder();
          } else {
            return acl_;
          }
        }
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 8;</code>
         */
        private com.google.protobuf.SingleFieldBuilder<
            org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProtoOrBuilder> 
            getAclFieldBuilder() {
          if (aclBuilder_ == null) {
            aclBuilder_ = new com.google.protobuf.SingleFieldBuilder<
                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProtoOrBuilder>(
                    acl_,
                    getParentForChildren(),
                    isClean());
            acl_ = null;
          }
          return aclBuilder_;
        }
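        // Presence tracking: each optional field of this builder owns one bit
        // of bitField0_ (fileUC = 0x40, acl = 0x80, xAttrs = 0x100). The
        // has*() methods test the bit, the set*()/merge*() methods set it,
        // and the clear*() methods clear it, which lets hasAcl() distinguish
        // "never set" from "explicitly set to the default value".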

        // optional .hadoop.hdfs.fsimage.INodeSection.XAttrFeatureProto xAttrs = 9;
        private org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto xAttrs_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto.getDefaultInstance();
        private com.google.protobuf.SingleFieldBuilder<
            org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProtoOrBuilder> xAttrsBuilder_;
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.XAttrFeatureProto xAttrs = 9;</code>
         */
        public boolean hasXAttrs() {
          return ((bitField0_ & 0x00000100) == 0x00000100);
        }
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.XAttrFeatureProto xAttrs = 9;</code>
         */
        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto getXAttrs() {
          if (xAttrsBuilder_ == null) {
            return xAttrs_;
          } else {
            return xAttrsBuilder_.getMessage();
          }
        }
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.XAttrFeatureProto xAttrs = 9;</code>
         */
        public Builder setXAttrs(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto value) {
          if (xAttrsBuilder_ == null) {
            if (value == null) {
              throw new NullPointerException();
            }
            xAttrs_ = value;
            onChanged();
          } else {
            xAttrsBuilder_.setMessage(value);
          }
          bitField0_ |= 0x00000100;
          return this;
        }
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.XAttrFeatureProto xAttrs = 9;</code>
         */
        public Builder setXAttrs(
            org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto.Builder builderForValue) {
          if (xAttrsBuilder_ == null) {
            xAttrs_ = builderForValue.build();
            onChanged();
          } else {
            xAttrsBuilder_.setMessage(builderForValue.build());
          }
          bitField0_ |= 0x00000100;
          return this;
        }
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.XAttrFeatureProto xAttrs = 9;</code>
         */
        public Builder mergeXAttrs(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto value) {
          if (xAttrsBuilder_ == null) {
            if (((bitField0_ & 0x00000100) == 0x00000100) &&
                xAttrs_ != org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto.getDefaultInstance()) {
              xAttrs_ =
                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto.newBuilder(xAttrs_).mergeFrom(value).buildPartial();
            } else {
              xAttrs_ = value;
            }
            onChanged();
          } else {
            xAttrsBuilder_.mergeFrom(value);
          }
          bitField0_ |= 0x00000100;
          return this;
        }
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.XAttrFeatureProto xAttrs = 9;</code>
         */
        public Builder clearXAttrs() {
          if (xAttrsBuilder_ == null) {
            xAttrs_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto.getDefaultInstance();
            onChanged();
          } else {
            xAttrsBuilder_.clear();
          }
          bitField0_ = (bitField0_ & ~0x00000100);
          return this;
        }
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.XAttrFeatureProto xAttrs = 9;</code>
         */
        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto.Builder getXAttrsBuilder() {
          bitField0_ |= 0x00000100;
          onChanged();
          return getXAttrsFieldBuilder().getBuilder();
        }
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.XAttrFeatureProto xAttrs = 9;</code>
         */
        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProtoOrBuilder getXAttrsOrBuilder() {
          if (xAttrsBuilder_ != null) {
            return xAttrsBuilder_.getMessageOrBuilder();
          } else {
            return xAttrs_;
          }
        }
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.XAttrFeatureProto xAttrs = 9;</code>
         */
        private com.google.protobuf.SingleFieldBuilder<
            org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProtoOrBuilder> 
            getXAttrsFieldBuilder() {
          if (xAttrsBuilder_ == null) {
            xAttrsBuilder_ = new com.google.protobuf.SingleFieldBuilder<
                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProtoOrBuilder>(
                    xAttrs_,
                    getParentForChildren(),
                    isClean());
            xAttrs_ = null;
          }
          return xAttrsBuilder_;
        }

        // @@protoc_insertion_point(builder_scope:hadoop.hdfs.fsimage.INodeSection.INodeFile)
      }

      static {
        defaultInstance = new INodeFile(true);
        defaultInstance.initFields();
      }

      // @@protoc_insertion_point(class_scope:hadoop.hdfs.fsimage.INodeSection.INodeFile)
    }
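    // A minimal usage sketch for the generated INodeFile builder (the values
    // are hypothetical and only fields shown above are exercised):
    //
    //   FsImageProto.INodeSection.INodeFile file =
    //       FsImageProto.INodeSection.INodeFile.newBuilder()
    //           .setAcl(FsImageProto.INodeSection.AclFeatureProto.getDefaultInstance())
    //           .setXAttrs(FsImageProto.INodeSection.XAttrFeatureProto.getDefaultInstance())
    //           .build();
    //
    // build() throws if any required field is unset, while buildPartial()
    // returns the message as-is.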

    public interface INodeDirectoryOrBuilder
        extends com.google.protobuf.MessageOrBuilder {

      // optional uint64 modificationTime = 1;
      /**
       * <code>optional uint64 modificationTime = 1;</code>
       */
      boolean hasModificationTime();
      /**
       * <code>optional uint64 modificationTime = 1;</code>
       */
      long getModificationTime();

      // optional uint64 nsQuota = 2;
      /**
       * <code>optional uint64 nsQuota = 2;</code>
       *
       * <pre>
       * namespace quota
       * </pre>
       */
      boolean hasNsQuota();
      /**
       * <code>optional uint64 nsQuota = 2;</code>
       *
       * <pre>
       * namespace quota
       * </pre>
       */
      long getNsQuota();

      // optional uint64 dsQuota = 3;
      /**
       * <code>optional uint64 dsQuota = 3;</code>
       *
       * <pre>
       * diskspace quota
       * </pre>
       */
      boolean hasDsQuota();
      /**
       * <code>optional uint64 dsQuota = 3;</code>
       *
       * <pre>
       * diskspace quota
       * </pre>
       */
      long getDsQuota();

      // optional fixed64 permission = 4;
      /**
       * <code>optional fixed64 permission = 4;</code>
       */
      boolean hasPermission();
      /**
       * <code>optional fixed64 permission = 4;</code>
       */
      long getPermission();

      // optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 5;
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 5;</code>
       */
      boolean hasAcl();
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 5;</code>
       */
      org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto getAcl();
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 5;</code>
       */
      org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProtoOrBuilder getAclOrBuilder();

      // optional .hadoop.hdfs.fsimage.INodeSection.XAttrFeatureProto xAttrs = 6;
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.XAttrFeatureProto xAttrs = 6;</code>
       */
      boolean hasXAttrs();
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.XAttrFeatureProto xAttrs = 6;</code>
       */
      org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto getXAttrs();
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.XAttrFeatureProto xAttrs = 6;</code>
       */
      org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProtoOrBuilder getXAttrsOrBuilder();
    }
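    // As with every optional field in proto2 generated code, the accessor
    // contract above is: hasX() reports whether the field was explicitly set,
    // and getX() returns the field's default value (for message fields, the
    // default instance) when it is unset.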
    /**
     * Protobuf type {@code hadoop.hdfs.fsimage.INodeSection.INodeDirectory}
     */
    public static final class INodeDirectory extends
        com.google.protobuf.GeneratedMessage
        implements INodeDirectoryOrBuilder {
      // Use INodeDirectory.newBuilder() to construct.
      private INodeDirectory(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
        super(builder);
        this.unknownFields = builder.getUnknownFields();
      }
      private INodeDirectory(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

      private static final INodeDirectory defaultInstance;
      public static INodeDirectory getDefaultInstance() {
        return defaultInstance;
      }

      public INodeDirectory getDefaultInstanceForType() {
        return defaultInstance;
      }

      private final com.google.protobuf.UnknownFieldSet unknownFields;
      @java.lang.Override
      public final com.google.protobuf.UnknownFieldSet
          getUnknownFields() {
        return this.unknownFields;
      }
      private INodeDirectory(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        initFields();
        int mutable_bitField0_ = 0;
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
            com.google.protobuf.UnknownFieldSet.newBuilder();
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              default: {
                if (!parseUnknownField(input, unknownFields,
                                       extensionRegistry, tag)) {
                  done = true;
                }
                break;
              }
              case 8: {
                bitField0_ |= 0x00000001;
                modificationTime_ = input.readUInt64();
                break;
              }
              case 16: {
                bitField0_ |= 0x00000002;
                nsQuota_ = input.readUInt64();
                break;
              }
              case 24: {
                bitField0_ |= 0x00000004;
                dsQuota_ = input.readUInt64();
                break;
              }
              case 33: {
                bitField0_ |= 0x00000008;
                permission_ = input.readFixed64();
                break;
              }
              case 42: {
                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto.Builder subBuilder = null;
                if (((bitField0_ & 0x00000010) == 0x00000010)) {
                  subBuilder = acl_.toBuilder();
                }
                acl_ = input.readMessage(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto.PARSER, extensionRegistry);
                if (subBuilder != null) {
                  subBuilder.mergeFrom(acl_);
                  acl_ = subBuilder.buildPartial();
                }
                bitField0_ |= 0x00000010;
                break;
              }
              case 50: {
                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto.Builder subBuilder = null;
                if (((bitField0_ & 0x00000020) == 0x00000020)) {
                  subBuilder = xAttrs_.toBuilder();
                }
                xAttrs_ = input.readMessage(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto.PARSER, extensionRegistry);
                if (subBuilder != null) {
                  subBuilder.mergeFrom(xAttrs_);
                  xAttrs_ = subBuilder.buildPartial();
                }
                bitField0_ |= 0x00000020;
                break;
              }
            }
          }
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(this);
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(
              e.getMessage()).setUnfinishedMessage(this);
        } finally {
          this.unknownFields = unknownFields.build();
          makeExtensionsImmutable();
        }
      }
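      // The case labels in the parsing loop above are protobuf wire-format
      // tags: tag = (field_number << 3) | wire_type. For example, case 8 is
      // field 1 as a varint (1 << 3 | 0), case 33 is field 4 as a 64-bit
      // fixed-width value (4 << 3 | 1), and case 42 is field 5 as a
      // length-delimited submessage (5 << 3 | 2). A tag of 0 marks the end of
      // the input.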
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_INodeDirectory_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_INodeDirectory_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory.Builder.class);
      }

      public static com.google.protobuf.Parser<INodeDirectory> PARSER =
          new com.google.protobuf.AbstractParser<INodeDirectory>() {
        public INodeDirectory parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          return new INodeDirectory(input, extensionRegistry);
        }
      };

      @java.lang.Override
      public com.google.protobuf.Parser<INodeDirectory> getParserForType() {
        return PARSER;
      }

      private int bitField0_;
      // optional uint64 modificationTime = 1;
      public static final int MODIFICATIONTIME_FIELD_NUMBER = 1;
      private long modificationTime_;
      /**
       * <code>optional uint64 modificationTime = 1;</code>
       */
      public boolean hasModificationTime() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>optional uint64 modificationTime = 1;</code>
       */
      public long getModificationTime() {
        return modificationTime_;
      }

      // optional uint64 nsQuota = 2;
      public static final int NSQUOTA_FIELD_NUMBER = 2;
      private long nsQuota_;
      /**
       * <code>optional uint64 nsQuota = 2;</code>
       *
       * <pre>
       * namespace quota
       * </pre>
       */
      public boolean hasNsQuota() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>optional uint64 nsQuota = 2;</code>
       *
       * <pre>
       * namespace quota
       * </pre>
       */
      public long getNsQuota() {
        return nsQuota_;
      }

      // optional uint64 dsQuota = 3;
      public static final int DSQUOTA_FIELD_NUMBER = 3;
      private long dsQuota_;
      /**
       * <code>optional uint64 dsQuota = 3;</code>
       *
       * <pre>
       * diskspace quota
       * </pre>
       */
      public boolean hasDsQuota() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      /**
       * <code>optional uint64 dsQuota = 3;</code>
       *
       * <pre>
       * diskspace quota
       * </pre>
       */
      public long getDsQuota() {
        return dsQuota_;
      }

      // optional fixed64 permission = 4;
      public static final int PERMISSION_FIELD_NUMBER = 4;
      private long permission_;
      /**
       * <code>optional fixed64 permission = 4;</code>
       */
      public boolean hasPermission() {
        return ((bitField0_ & 0x00000008) == 0x00000008);
      }
      /**
       * <code>optional fixed64 permission = 4;</code>
       */
      public long getPermission() {
        return permission_;
      }

      // optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 5;
      public static final int ACL_FIELD_NUMBER = 5;
      private org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto acl_;
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 5;</code>
       */
      public boolean hasAcl() {
        return ((bitField0_ & 0x00000010) == 0x00000010);
      }
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 5;</code>
       */
      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto getAcl() {
        return acl_;
      }
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 5;</code>
       */
      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProtoOrBuilder getAclOrBuilder() {
        return acl_;
      }

      // optional .hadoop.hdfs.fsimage.INodeSection.XAttrFeatureProto xAttrs = 6;
      public static final int XATTRS_FIELD_NUMBER = 6;
      private org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto xAttrs_;
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.XAttrFeatureProto xAttrs = 6;</code>
       */
      public boolean hasXAttrs() {
        return ((bitField0_ & 0x00000020) == 0x00000020);
      }
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.XAttrFeatureProto xAttrs = 6;</code>
       */
      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto getXAttrs() {
        return xAttrs_;
      }
      /**
       * <code>optional .hadoop.hdfs.fsimage.INodeSection.XAttrFeatureProto xAttrs = 6;</code>
       */
      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProtoOrBuilder getXAttrsOrBuilder() {
        return xAttrs_;
      }

      private void initFields() {
        modificationTime_ = 0L;
        nsQuota_ = 0L;
        dsQuota_ = 0L;
        permission_ = 0L;
        acl_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto.getDefaultInstance();
        xAttrs_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto.getDefaultInstance();
      }
      private byte memoizedIsInitialized = -1;
      public final boolean isInitialized() {
        byte isInitialized = memoizedIsInitialized;
        if (isInitialized != -1) return isInitialized == 1;

        if (hasXAttrs()) {
          if (!getXAttrs().isInitialized()) {
            memoizedIsInitialized = 0;
            return false;
          }
        }
        memoizedIsInitialized = 1;
        return true;
      }
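      // isInitialized() memoizes its answer in a byte (-1 unknown, 0 false,
      // 1 true) so repeated checks before serialization stay cheap. Only
      // xAttrs is inspected because it is the only field here whose message
      // type transitively declares required fields.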

      public void writeTo(com.google.protobuf.CodedOutputStream output)
                          throws java.io.IOException {
        getSerializedSize();
        if (((bitField0_ & 0x00000001) == 0x00000001)) {
          output.writeUInt64(1, modificationTime_);
        }
        if (((bitField0_ & 0x00000002) == 0x00000002)) {
          output.writeUInt64(2, nsQuota_);
        }
        if (((bitField0_ & 0x00000004) == 0x00000004)) {
          output.writeUInt64(3, dsQuota_);
        }
        if (((bitField0_ & 0x00000008) == 0x00000008)) {
          output.writeFixed64(4, permission_);
        }
        if (((bitField0_ & 0x00000010) == 0x00000010)) {
          output.writeMessage(5, acl_);
        }
        if (((bitField0_ & 0x00000020) == 0x00000020)) {
          output.writeMessage(6, xAttrs_);
        }
        getUnknownFields().writeTo(output);
      }

      private int memoizedSerializedSize = -1;
      public int getSerializedSize() {
        int size = memoizedSerializedSize;
        if (size != -1) return size;

        size = 0;
        if (((bitField0_ & 0x00000001) == 0x00000001)) {
          size += com.google.protobuf.CodedOutputStream
            .computeUInt64Size(1, modificationTime_);
        }
        if (((bitField0_ & 0x00000002) == 0x00000002)) {
          size += com.google.protobuf.CodedOutputStream
            .computeUInt64Size(2, nsQuota_);
        }
        if (((bitField0_ & 0x00000004) == 0x00000004)) {
          size += com.google.protobuf.CodedOutputStream
            .computeUInt64Size(3, dsQuota_);
        }
        if (((bitField0_ & 0x00000008) == 0x00000008)) {
          size += com.google.protobuf.CodedOutputStream
            .computeFixed64Size(4, permission_);
        }
        if (((bitField0_ & 0x00000010) == 0x00000010)) {
          size += com.google.protobuf.CodedOutputStream
            .computeMessageSize(5, acl_);
        }
        if (((bitField0_ & 0x00000020) == 0x00000020)) {
          size += com.google.protobuf.CodedOutputStream
            .computeMessageSize(6, xAttrs_);
        }
        size += getUnknownFields().getSerializedSize();
        memoizedSerializedSize = size;
        return size;
      }
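      // getSerializedSize() memoizes its result, and writeTo() invokes it
      // first so that the memoized sizes of nested messages are populated
      // before their length prefixes are written to the stream.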

      private static final long serialVersionUID = 0L;
      @java.lang.Override
      protected java.lang.Object writeReplace()
          throws java.io.ObjectStreamException {
        return super.writeReplace();
      }

      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory parseFrom(
          com.google.protobuf.ByteString data)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory parseFrom(
          com.google.protobuf.ByteString data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory parseFrom(byte[] data)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory parseFrom(
          byte[] data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory parseFrom(java.io.InputStream input)
          throws java.io.IOException {
        return PARSER.parseFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory parseFrom(
          java.io.InputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseFrom(input, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory parseDelimitedFrom(java.io.InputStream input)
          throws java.io.IOException {
        return PARSER.parseDelimitedFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory parseDelimitedFrom(
          java.io.InputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseDelimitedFrom(input, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory parseFrom(
          com.google.protobuf.CodedInputStream input)
          throws java.io.IOException {
        return PARSER.parseFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory parseFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseFrom(input, extensionRegistry);
      }
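      // A minimal parsing sketch (readSectionBytes() is a hypothetical helper
      // that returns one serialized INodeDirectory record):
      //
      //   byte[] data = readSectionBytes();
      //   FsImageProto.INodeSection.INodeDirectory dir =
      //       FsImageProto.INodeSection.INodeDirectory.parseFrom(data);
      //   if (dir.hasNsQuota()) {
      //     long nsQuota = dir.getNsQuota();
      //   }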

      public static Builder newBuilder() { return Builder.create(); }
      public Builder newBuilderForType() { return newBuilder(); }
      public static Builder newBuilder(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory prototype) {
        return newBuilder().mergeFrom(prototype);
      }
      public Builder toBuilder() { return newBuilder(this); }

      @java.lang.Override
      protected Builder newBuilderForType(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        Builder builder = new Builder(parent);
        return builder;
      }
      /**
       * Protobuf type {@code hadoop.hdfs.fsimage.INodeSection.INodeDirectory}
       */
      public static final class Builder extends
          com.google.protobuf.GeneratedMessage.Builder<Builder>
         implements org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectoryOrBuilder {
        public static final com.google.protobuf.Descriptors.Descriptor
            getDescriptor() {
          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_INodeDirectory_descriptor;
        }

        protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
            internalGetFieldAccessorTable() {
          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_INodeDirectory_fieldAccessorTable
              .ensureFieldAccessorsInitialized(
                  org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory.Builder.class);
        }

        // Construct using org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory.newBuilder()
        private Builder() {
          maybeForceBuilderInitialization();
        }

        private Builder(
            com.google.protobuf.GeneratedMessage.BuilderParent parent) {
          super(parent);
          maybeForceBuilderInitialization();
        }
        private void maybeForceBuilderInitialization() {
          if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
            getAclFieldBuilder();
            getXAttrsFieldBuilder();
          }
        }
        private static Builder create() {
          return new Builder();
        }

        public Builder clear() {
          super.clear();
          modificationTime_ = 0L;
          bitField0_ = (bitField0_ & ~0x00000001);
          nsQuota_ = 0L;
          bitField0_ = (bitField0_ & ~0x00000002);
          dsQuota_ = 0L;
          bitField0_ = (bitField0_ & ~0x00000004);
          permission_ = 0L;
          bitField0_ = (bitField0_ & ~0x00000008);
          if (aclBuilder_ == null) {
            acl_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto.getDefaultInstance();
          } else {
            aclBuilder_.clear();
          }
          bitField0_ = (bitField0_ & ~0x00000010);
          if (xAttrsBuilder_ == null) {
            xAttrs_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto.getDefaultInstance();
          } else {
            xAttrsBuilder_.clear();
          }
          bitField0_ = (bitField0_ & ~0x00000020);
          return this;
        }

        public Builder clone() {
          return create().mergeFrom(buildPartial());
        }

        public com.google.protobuf.Descriptors.Descriptor
            getDescriptorForType() {
          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_INodeDirectory_descriptor;
        }

        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory getDefaultInstanceForType() {
          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory.getDefaultInstance();
        }

        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory build() {
          org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory result = buildPartial();
          if (!result.isInitialized()) {
            throw newUninitializedMessageException(result);
          }
          return result;
        }

        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory buildPartial() {
          org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory result = new org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory(this);
          int from_bitField0_ = bitField0_;
          int to_bitField0_ = 0;
          if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
            to_bitField0_ |= 0x00000001;
          }
          result.modificationTime_ = modificationTime_;
          if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
            to_bitField0_ |= 0x00000002;
          }
          result.nsQuota_ = nsQuota_;
          if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
            to_bitField0_ |= 0x00000004;
          }
          result.dsQuota_ = dsQuota_;
          if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
            to_bitField0_ |= 0x00000008;
          }
          result.permission_ = permission_;
          if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
            to_bitField0_ |= 0x00000010;
          }
          if (aclBuilder_ == null) {
            result.acl_ = acl_;
          } else {
            result.acl_ = aclBuilder_.build();
          }
          if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
            to_bitField0_ |= 0x00000020;
          }
          if (xAttrsBuilder_ == null) {
            result.xAttrs_ = xAttrs_;
          } else {
            result.xAttrs_ = xAttrsBuilder_.build();
          }
          result.bitField0_ = to_bitField0_;
          onBuilt();
          return result;
        }
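        // buildPartial() copies the builder's presence bits into the
        // message's own bitField0_ one field at a time (from_bitField0_ to
        // to_bitField0_), and for each message-typed field stores either the
        // raw value or the output of its nested field builder. No
        // required-field validation happens here; that is build()'s job.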
7617
7618        public Builder mergeFrom(com.google.protobuf.Message other) {
7619          if (other instanceof org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory) {
7620            return mergeFrom((org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory)other);
7621          } else {
7622            super.mergeFrom(other);
7623            return this;
7624          }
7625        }
7626
7627        public Builder mergeFrom(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory other) {
7628          if (other == org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory.getDefaultInstance()) return this;
7629          if (other.hasModificationTime()) {
7630            setModificationTime(other.getModificationTime());
7631          }
7632          if (other.hasNsQuota()) {
7633            setNsQuota(other.getNsQuota());
7634          }
7635          if (other.hasDsQuota()) {
7636            setDsQuota(other.getDsQuota());
7637          }
7638          if (other.hasPermission()) {
7639            setPermission(other.getPermission());
7640          }
7641          if (other.hasAcl()) {
7642            mergeAcl(other.getAcl());
7643          }
7644          if (other.hasXAttrs()) {
7645            mergeXAttrs(other.getXAttrs());
7646          }
7647          this.mergeUnknownFields(other.getUnknownFields());
7648          return this;
7649        }
7650
7651        public final boolean isInitialized() {
7652          if (hasXAttrs()) {
7653            if (!getXAttrs().isInitialized()) {
7654              
7655              return false;
7656            }
7657          }
7658          return true;
7659        }
7660
7661        public Builder mergeFrom(
7662            com.google.protobuf.CodedInputStream input,
7663            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
7664            throws java.io.IOException {
7665          org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory parsedMessage = null;
7666          try {
7667            parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
7668          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
7669            parsedMessage = (org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory) e.getUnfinishedMessage();
7670            throw e;
7671          } finally {
7672            if (parsedMessage != null) {
7673              mergeFrom(parsedMessage);
7674            }
7675          }
7676          return this;
7677        }
7678        private int bitField0_;
7679
7680        // optional uint64 modificationTime = 1;
7681        private long modificationTime_ ;
7682        /**
7683         * <code>optional uint64 modificationTime = 1;</code>
7684         */
7685        public boolean hasModificationTime() {
7686          return ((bitField0_ & 0x00000001) == 0x00000001);
7687        }
7688        /**
7689         * <code>optional uint64 modificationTime = 1;</code>
7690         */
7691        public long getModificationTime() {
7692          return modificationTime_;
7693        }
7694        /**
7695         * <code>optional uint64 modificationTime = 1;</code>
7696         */
7697        public Builder setModificationTime(long value) {
7698          bitField0_ |= 0x00000001;
7699          modificationTime_ = value;
7700          onChanged();
7701          return this;
7702        }
7703        /**
7704         * <code>optional uint64 modificationTime = 1;</code>
7705         */
7706        public Builder clearModificationTime() {
7707          bitField0_ = (bitField0_ & ~0x00000001);
7708          modificationTime_ = 0L;
7709          onChanged();
7710          return this;
7711        }
7712
7713        // optional uint64 nsQuota = 2;
7714        private long nsQuota_ ;
7715        /**
7716         * <code>optional uint64 nsQuota = 2;</code>
7717         *
7718         * <pre>
7719         * namespace quota
7720         * </pre>
7721         */
7722        public boolean hasNsQuota() {
7723          return ((bitField0_ & 0x00000002) == 0x00000002);
7724        }
7725        /**
7726         * <code>optional uint64 nsQuota = 2;</code>
7727         *
7728         * <pre>
7729         * namespace quota
7730         * </pre>
7731         */
7732        public long getNsQuota() {
7733          return nsQuota_;
7734        }
7735        /**
7736         * <code>optional uint64 nsQuota = 2;</code>
7737         *
7738         * <pre>
7739         * namespace quota
7740         * </pre>
7741         */
7742        public Builder setNsQuota(long value) {
7743          bitField0_ |= 0x00000002;
7744          nsQuota_ = value;
7745          onChanged();
7746          return this;
7747        }
7748        /**
7749         * <code>optional uint64 nsQuota = 2;</code>
7750         *
7751         * <pre>
7752         * namespace quota
7753         * </pre>
7754         */
7755        public Builder clearNsQuota() {
7756          bitField0_ = (bitField0_ & ~0x00000002);
7757          nsQuota_ = 0L;
7758          onChanged();
7759          return this;
7760        }
7761
7762        // optional uint64 dsQuota = 3;
7763        private long dsQuota_ ;
7764        /**
7765         * <code>optional uint64 dsQuota = 3;</code>
7766         *
7767         * <pre>
7768         * diskspace quota
7769         * </pre>
7770         */
7771        public boolean hasDsQuota() {
7772          return ((bitField0_ & 0x00000004) == 0x00000004);
7773        }
7774        /**
7775         * <code>optional uint64 dsQuota = 3;</code>
7776         *
7777         * <pre>
7778         * diskspace quota
7779         * </pre>
7780         */
7781        public long getDsQuota() {
7782          return dsQuota_;
7783        }
7784        /**
7785         * <code>optional uint64 dsQuota = 3;</code>
7786         *
7787         * <pre>
7788         * diskspace quota
7789         * </pre>
7790         */
7791        public Builder setDsQuota(long value) {
7792          bitField0_ |= 0x00000004;
7793          dsQuota_ = value;
7794          onChanged();
7795          return this;
7796        }
7797        /**
7798         * <code>optional uint64 dsQuota = 3;</code>
7799         *
7800         * <pre>
7801         * diskspace quota
7802         * </pre>
7803         */
7804        public Builder clearDsQuota() {
7805          bitField0_ = (bitField0_ & ~0x00000004);
7806          dsQuota_ = 0L;
7807          onChanged();
7808          return this;
7809        }
7810
7811        // optional fixed64 permission = 4;
7812        private long permission_ ;
7813        /**
7814         * <code>optional fixed64 permission = 4;</code>
7815         */
7816        public boolean hasPermission() {
7817          return ((bitField0_ & 0x00000008) == 0x00000008);
7818        }
7819        /**
7820         * <code>optional fixed64 permission = 4;</code>
7821         */
7822        public long getPermission() {
7823          return permission_;
7824        }
7825        /**
7826         * <code>optional fixed64 permission = 4;</code>
7827         */
7828        public Builder setPermission(long value) {
7829          bitField0_ |= 0x00000008;
7830          permission_ = value;
7831          onChanged();
7832          return this;
7833        }
7834        /**
7835         * <code>optional fixed64 permission = 4;</code>
7836         */
7837        public Builder clearPermission() {
7838          bitField0_ = (bitField0_ & ~0x00000008);
7839          permission_ = 0L;
7840          onChanged();
7841          return this;
7842        }
7843
7844        // optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 5;
7845        private org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto acl_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto.getDefaultInstance();
7846        private com.google.protobuf.SingleFieldBuilder<
7847            org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProtoOrBuilder> aclBuilder_;
7848        /**
7849         * <code>optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 5;</code>
7850         */
7851        public boolean hasAcl() {
7852          return ((bitField0_ & 0x00000010) == 0x00000010);
7853        }
7854        /**
7855         * <code>optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 5;</code>
7856         */
7857        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto getAcl() {
7858          if (aclBuilder_ == null) {
7859            return acl_;
7860          } else {
7861            return aclBuilder_.getMessage();
7862          }
7863        }
7864        /**
7865         * <code>optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 5;</code>
7866         */
7867        public Builder setAcl(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto value) {
7868          if (aclBuilder_ == null) {
7869            if (value == null) {
7870              throw new NullPointerException();
7871            }
7872            acl_ = value;
7873            onChanged();
7874          } else {
7875            aclBuilder_.setMessage(value);
7876          }
7877          bitField0_ |= 0x00000010;
7878          return this;
7879        }
7880        /**
7881         * <code>optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 5;</code>
7882         */
7883        public Builder setAcl(
7884            org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto.Builder builderForValue) {
7885          if (aclBuilder_ == null) {
7886            acl_ = builderForValue.build();
7887            onChanged();
7888          } else {
7889            aclBuilder_.setMessage(builderForValue.build());
7890          }
7891          bitField0_ |= 0x00000010;
7892          return this;
7893        }
7894        /**
7895         * <code>optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 5;</code>
7896         */
7897        public Builder mergeAcl(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto value) {
7898          if (aclBuilder_ == null) {
7899            if (((bitField0_ & 0x00000010) == 0x00000010) &&
7900                acl_ != org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto.getDefaultInstance()) {
7901              acl_ =
7902                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto.newBuilder(acl_).mergeFrom(value).buildPartial();
7903            } else {
7904              acl_ = value;
7905            }
7906            onChanged();
7907          } else {
7908            aclBuilder_.mergeFrom(value);
7909          }
7910          bitField0_ |= 0x00000010;
7911          return this;
7912        }
7913        /**
7914         * <code>optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 5;</code>
7915         */
7916        public Builder clearAcl() {
7917          if (aclBuilder_ == null) {
7918            acl_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto.getDefaultInstance();
7919            onChanged();
7920          } else {
7921            aclBuilder_.clear();
7922          }
7923          bitField0_ = (bitField0_ & ~0x00000010);
7924          return this;
7925        }
7926        /**
7927         * <code>optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 5;</code>
7928         */
7929        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto.Builder getAclBuilder() {
7930          bitField0_ |= 0x00000010;
7931          onChanged();
7932          return getAclFieldBuilder().getBuilder();
7933        }
7934        /**
7935         * <code>optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 5;</code>
7936         */
7937        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProtoOrBuilder getAclOrBuilder() {
7938          if (aclBuilder_ != null) {
7939            return aclBuilder_.getMessageOrBuilder();
7940          } else {
7941            return acl_;
7942          }
7943        }
7944        /**
7945         * <code>optional .hadoop.hdfs.fsimage.INodeSection.AclFeatureProto acl = 5;</code>
7946         */
7947        private com.google.protobuf.SingleFieldBuilder<
7948            org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProtoOrBuilder> 
7949            getAclFieldBuilder() {
7950          if (aclBuilder_ == null) {
7951            aclBuilder_ = new com.google.protobuf.SingleFieldBuilder<
7952                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProtoOrBuilder>(
7953                    acl_,
7954                    getParentForChildren(),
7955                    isClean());
7956            acl_ = null;
7957          }
7958          return aclBuilder_;
7959        }
7960
7961        // optional .hadoop.hdfs.fsimage.INodeSection.XAttrFeatureProto xAttrs = 6;
7962        private org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto xAttrs_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto.getDefaultInstance();
7963        private com.google.protobuf.SingleFieldBuilder<
7964            org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProtoOrBuilder> xAttrsBuilder_;
7965        /**
7966         * <code>optional .hadoop.hdfs.fsimage.INodeSection.XAttrFeatureProto xAttrs = 6;</code>
7967         */
7968        public boolean hasXAttrs() {
7969          return ((bitField0_ & 0x00000020) == 0x00000020);
7970        }
7971        /**
7972         * <code>optional .hadoop.hdfs.fsimage.INodeSection.XAttrFeatureProto xAttrs = 6;</code>
7973         */
7974        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto getXAttrs() {
7975          if (xAttrsBuilder_ == null) {
7976            return xAttrs_;
7977          } else {
7978            return xAttrsBuilder_.getMessage();
7979          }
7980        }
7981        /**
7982         * <code>optional .hadoop.hdfs.fsimage.INodeSection.XAttrFeatureProto xAttrs = 6;</code>
7983         */
7984        public Builder setXAttrs(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto value) {
7985          if (xAttrsBuilder_ == null) {
7986            if (value == null) {
7987              throw new NullPointerException();
7988            }
7989            xAttrs_ = value;
7990            onChanged();
7991          } else {
7992            xAttrsBuilder_.setMessage(value);
7993          }
7994          bitField0_ |= 0x00000020;
7995          return this;
7996        }
7997        /**
7998         * <code>optional .hadoop.hdfs.fsimage.INodeSection.XAttrFeatureProto xAttrs = 6;</code>
7999         */
8000        public Builder setXAttrs(
8001            org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto.Builder builderForValue) {
8002          if (xAttrsBuilder_ == null) {
8003            xAttrs_ = builderForValue.build();
8004            onChanged();
8005          } else {
8006            xAttrsBuilder_.setMessage(builderForValue.build());
8007          }
8008          bitField0_ |= 0x00000020;
8009          return this;
8010        }
8011        /**
8012         * <code>optional .hadoop.hdfs.fsimage.INodeSection.XAttrFeatureProto xAttrs = 6;</code>
8013         */
8014        public Builder mergeXAttrs(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto value) {
8015          if (xAttrsBuilder_ == null) {
8016            if (((bitField0_ & 0x00000020) == 0x00000020) &&
8017                xAttrs_ != org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto.getDefaultInstance()) {
8018              xAttrs_ =
8019                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto.newBuilder(xAttrs_).mergeFrom(value).buildPartial();
8020            } else {
8021              xAttrs_ = value;
8022            }
8023            onChanged();
8024          } else {
8025            xAttrsBuilder_.mergeFrom(value);
8026          }
8027          bitField0_ |= 0x00000020;
8028          return this;
8029        }
8030        /**
8031         * <code>optional .hadoop.hdfs.fsimage.INodeSection.XAttrFeatureProto xAttrs = 6;</code>
8032         */
8033        public Builder clearXAttrs() {
8034          if (xAttrsBuilder_ == null) {
8035            xAttrs_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto.getDefaultInstance();
8036            onChanged();
8037          } else {
8038            xAttrsBuilder_.clear();
8039          }
8040          bitField0_ = (bitField0_ & ~0x00000020);
8041          return this;
8042        }
8043        /**
8044         * <code>optional .hadoop.hdfs.fsimage.INodeSection.XAttrFeatureProto xAttrs = 6;</code>
8045         */
8046        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto.Builder getXAttrsBuilder() {
8047          bitField0_ |= 0x00000020;
8048          onChanged();
8049          return getXAttrsFieldBuilder().getBuilder();
8050        }
8051        /**
8052         * <code>optional .hadoop.hdfs.fsimage.INodeSection.XAttrFeatureProto xAttrs = 6;</code>
8053         */
8054        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProtoOrBuilder getXAttrsOrBuilder() {
8055          if (xAttrsBuilder_ != null) {
8056            return xAttrsBuilder_.getMessageOrBuilder();
8057          } else {
8058            return xAttrs_;
8059          }
8060        }
8061        /**
8062         * <code>optional .hadoop.hdfs.fsimage.INodeSection.XAttrFeatureProto xAttrs = 6;</code>
8063         */
8064        private com.google.protobuf.SingleFieldBuilder<
8065            org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProtoOrBuilder> 
8066            getXAttrsFieldBuilder() {
8067          if (xAttrsBuilder_ == null) {
8068            xAttrsBuilder_ = new com.google.protobuf.SingleFieldBuilder<
8069                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProtoOrBuilder>(
8070                    xAttrs_,
8071                    getParentForChildren(),
8072                    isClean());
8073            xAttrs_ = null;
8074          }
8075          return xAttrsBuilder_;
8076        }
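        // Note on the lazy-builder pattern above: getXAttrsFieldBuilder() swaps the
        // plain xAttrs_ message for a SingleFieldBuilder on first use (nulling the
        // field), after which all reads and writes go through the builder so that
        // nested edits propagate change notifications to the parent builder.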
8077
8078        // @@protoc_insertion_point(builder_scope:hadoop.hdfs.fsimage.INodeSection.INodeDirectory)
8079      }
8080
8081      static {
8082        defaultInstance = new INodeDirectory(true);
8083        defaultInstance.initFields();
8084      }
8085
8086      // @@protoc_insertion_point(class_scope:hadoop.hdfs.fsimage.INodeSection.INodeDirectory)
8087    }
8088
8089    public interface INodeSymlinkOrBuilder
8090        extends com.google.protobuf.MessageOrBuilder {
8091
8092      // optional fixed64 permission = 1;
8093      /**
8094       * <code>optional fixed64 permission = 1;</code>
8095       */
8096      boolean hasPermission();
8097      /**
8098       * <code>optional fixed64 permission = 1;</code>
8099       */
8100      long getPermission();
8101
8102      // optional bytes target = 2;
8103      /**
8104       * <code>optional bytes target = 2;</code>
8105       */
8106      boolean hasTarget();
8107      /**
8108       * <code>optional bytes target = 2;</code>
8109       */
8110      com.google.protobuf.ByteString getTarget();
8111
8112      // optional uint64 modificationTime = 3;
8113      /**
8114       * <code>optional uint64 modificationTime = 3;</code>
8115       */
8116      boolean hasModificationTime();
8117      /**
8118       * <code>optional uint64 modificationTime = 3;</code>
8119       */
8120      long getModificationTime();
8121
8122      // optional uint64 accessTime = 4;
8123      /**
8124       * <code>optional uint64 accessTime = 4;</code>
8125       */
8126      boolean hasAccessTime();
8127      /**
8128       * <code>optional uint64 accessTime = 4;</code>
8129       */
8130      long getAccessTime();
8131    }
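    // INodeSymlink models a symbolic-link inode in the fsimage: the fixed64
    // permission word presumably packs the owner/group/mode triple into a single
    // value (the image loader defines the exact encoding), target holds the raw
    // link-target path bytes, and the two times are epoch timestamps.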
8132    /**
8133     * Protobuf type {@code hadoop.hdfs.fsimage.INodeSection.INodeSymlink}
8134     */
8135    public static final class INodeSymlink extends
8136        com.google.protobuf.GeneratedMessage
8137        implements INodeSymlinkOrBuilder {
8138      // Use INodeSymlink.newBuilder() to construct.
8139      private INodeSymlink(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
8140        super(builder);
8141        this.unknownFields = builder.getUnknownFields();
8142      }
8143      private INodeSymlink(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
8144
8145      private static final INodeSymlink defaultInstance;
8146      public static INodeSymlink getDefaultInstance() {
8147        return defaultInstance;
8148      }
8149
8150      public INodeSymlink getDefaultInstanceForType() {
8151        return defaultInstance;
8152      }
8153
8154      private final com.google.protobuf.UnknownFieldSet unknownFields;
8155      @java.lang.Override
8156      public final com.google.protobuf.UnknownFieldSet
8157          getUnknownFields() {
8158        return this.unknownFields;
8159      }
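      // The parsing constructor below dispatches on raw wire tags, where
      // tag = (field_number << 3) | wire_type: 9 = field 1/fixed64,
      // 18 = field 2/length-delimited, 24 = field 3/varint,
      // 32 = field 4/varint. Tag 0 marks end of input, and anything
      // unrecognized is preserved in unknownFields.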
8160      private INodeSymlink(
8161          com.google.protobuf.CodedInputStream input,
8162          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
8163          throws com.google.protobuf.InvalidProtocolBufferException {
8164        initFields();
8165        int mutable_bitField0_ = 0;
8166        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
8167            com.google.protobuf.UnknownFieldSet.newBuilder();
8168        try {
8169          boolean done = false;
8170          while (!done) {
8171            int tag = input.readTag();
8172            switch (tag) {
8173              case 0:
8174                done = true;
8175                break;
8176              default: {
8177                if (!parseUnknownField(input, unknownFields,
8178                                       extensionRegistry, tag)) {
8179                  done = true;
8180                }
8181                break;
8182              }
8183              case 9: {
8184                bitField0_ |= 0x00000001;
8185                permission_ = input.readFixed64();
8186                break;
8187              }
8188              case 18: {
8189                bitField0_ |= 0x00000002;
8190                target_ = input.readBytes();
8191                break;
8192              }
8193              case 24: {
8194                bitField0_ |= 0x00000004;
8195                modificationTime_ = input.readUInt64();
8196                break;
8197              }
8198              case 32: {
8199                bitField0_ |= 0x00000008;
8200                accessTime_ = input.readUInt64();
8201                break;
8202              }
8203            }
8204          }
8205        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
8206          throw e.setUnfinishedMessage(this);
8207        } catch (java.io.IOException e) {
8208          throw new com.google.protobuf.InvalidProtocolBufferException(
8209              e.getMessage()).setUnfinishedMessage(this);
8210        } finally {
8211          this.unknownFields = unknownFields.build();
8212          makeExtensionsImmutable();
8213        }
8214      }
8215      public static final com.google.protobuf.Descriptors.Descriptor
8216          getDescriptor() {
8217        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_INodeSymlink_descriptor;
8218      }
8219
8220      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
8221          internalGetFieldAccessorTable() {
8222        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_INodeSymlink_fieldAccessorTable
8223            .ensureFieldAccessorsInitialized(
8224                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink.Builder.class);
8225      }
8226
8227      public static com.google.protobuf.Parser<INodeSymlink> PARSER =
8228          new com.google.protobuf.AbstractParser<INodeSymlink>() {
8229        public INodeSymlink parsePartialFrom(
8230            com.google.protobuf.CodedInputStream input,
8231            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
8232            throws com.google.protobuf.InvalidProtocolBufferException {
8233          return new INodeSymlink(input, extensionRegistry);
8234        }
8235      };
8236
8237      @java.lang.Override
8238      public com.google.protobuf.Parser<INodeSymlink> getParserForType() {
8239        return PARSER;
8240      }
8241
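      // bitField0_ tracks field presence, one bit per optional field
      // (0x01 permission, 0x02 target, 0x04 modificationTime,
      // 0x08 accessTime); the hasXxx() accessors simply test these bits.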
8242      private int bitField0_;
8243      // optional fixed64 permission = 1;
8244      public static final int PERMISSION_FIELD_NUMBER = 1;
8245      private long permission_;
8246      /**
8247       * <code>optional fixed64 permission = 1;</code>
8248       */
8249      public boolean hasPermission() {
8250        return ((bitField0_ & 0x00000001) == 0x00000001);
8251      }
8252      /**
8253       * <code>optional fixed64 permission = 1;</code>
8254       */
8255      public long getPermission() {
8256        return permission_;
8257      }
8258
8259      // optional bytes target = 2;
8260      public static final int TARGET_FIELD_NUMBER = 2;
8261      private com.google.protobuf.ByteString target_;
8262      /**
8263       * <code>optional bytes target = 2;</code>
8264       */
8265      public boolean hasTarget() {
8266        return ((bitField0_ & 0x00000002) == 0x00000002);
8267      }
8268      /**
8269       * <code>optional bytes target = 2;</code>
8270       */
8271      public com.google.protobuf.ByteString getTarget() {
8272        return target_;
8273      }
8274
8275      // optional uint64 modificationTime = 3;
8276      public static final int MODIFICATIONTIME_FIELD_NUMBER = 3;
8277      private long modificationTime_;
8278      /**
8279       * <code>optional uint64 modificationTime = 3;</code>
8280       */
8281      public boolean hasModificationTime() {
8282        return ((bitField0_ & 0x00000004) == 0x00000004);
8283      }
8284      /**
8285       * <code>optional uint64 modificationTime = 3;</code>
8286       */
8287      public long getModificationTime() {
8288        return modificationTime_;
8289      }
8290
8291      // optional uint64 accessTime = 4;
8292      public static final int ACCESSTIME_FIELD_NUMBER = 4;
8293      private long accessTime_;
8294      /**
8295       * <code>optional uint64 accessTime = 4;</code>
8296       */
8297      public boolean hasAccessTime() {
8298        return ((bitField0_ & 0x00000008) == 0x00000008);
8299      }
8300      /**
8301       * <code>optional uint64 accessTime = 4;</code>
8302       */
8303      public long getAccessTime() {
8304        return accessTime_;
8305      }
8306
8307      private void initFields() {
8308        permission_ = 0L;
8309        target_ = com.google.protobuf.ByteString.EMPTY;
8310        modificationTime_ = 0L;
8311        accessTime_ = 0L;
8312      }
8313      private byte memoizedIsInitialized = -1;
8314      public final boolean isInitialized() {
8315        byte isInitialized = memoizedIsInitialized;
8316        if (isInitialized != -1) return isInitialized == 1;
8317
8318        memoizedIsInitialized = 1;
8319        return true;
8320      }
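      // Every INodeSymlink field is optional, so validation always succeeds;
      // memoizedIsInitialized caches the result (-1 unknown, 1 valid).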
8321
8322      public void writeTo(com.google.protobuf.CodedOutputStream output)
8323                          throws java.io.IOException {
8324        getSerializedSize();
8325        if (((bitField0_ & 0x00000001) == 0x00000001)) {
8326          output.writeFixed64(1, permission_);
8327        }
8328        if (((bitField0_ & 0x00000002) == 0x00000002)) {
8329          output.writeBytes(2, target_);
8330        }
8331        if (((bitField0_ & 0x00000004) == 0x00000004)) {
8332          output.writeUInt64(3, modificationTime_);
8333        }
8334        if (((bitField0_ & 0x00000008) == 0x00000008)) {
8335          output.writeUInt64(4, accessTime_);
8336        }
8337        getUnknownFields().writeTo(output);
8338      }
8339
8340      private int memoizedSerializedSize = -1;
8341      public int getSerializedSize() {
8342        int size = memoizedSerializedSize;
8343        if (size != -1) return size;
8344
8345        size = 0;
8346        if (((bitField0_ & 0x00000001) == 0x00000001)) {
8347          size += com.google.protobuf.CodedOutputStream
8348            .computeFixed64Size(1, permission_);
8349        }
8350        if (((bitField0_ & 0x00000002) == 0x00000002)) {
8351          size += com.google.protobuf.CodedOutputStream
8352            .computeBytesSize(2, target_);
8353        }
8354        if (((bitField0_ & 0x00000004) == 0x00000004)) {
8355          size += com.google.protobuf.CodedOutputStream
8356            .computeUInt64Size(3, modificationTime_);
8357        }
8358        if (((bitField0_ & 0x00000008) == 0x00000008)) {
8359          size += com.google.protobuf.CodedOutputStream
8360            .computeUInt64Size(4, accessTime_);
8361        }
8362        size += getUnknownFields().getSerializedSize();
8363        memoizedSerializedSize = size;
8364        return size;
8365      }
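      // getSerializedSize() walks the same presence bits as writeTo() and
      // memoizes the byte count, which is why writeTo() calls it first and
      // can then stream fields without re-measuring.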
8366
8367      private static final long serialVersionUID = 0L;
8368      @java.lang.Override
8369      protected java.lang.Object writeReplace()
8370          throws java.io.ObjectStreamException {
8371        return super.writeReplace();
8372      }
8373
8374      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink parseFrom(
8375          com.google.protobuf.ByteString data)
8376          throws com.google.protobuf.InvalidProtocolBufferException {
8377        return PARSER.parseFrom(data);
8378      }
8379      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink parseFrom(
8380          com.google.protobuf.ByteString data,
8381          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
8382          throws com.google.protobuf.InvalidProtocolBufferException {
8383        return PARSER.parseFrom(data, extensionRegistry);
8384      }
8385      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink parseFrom(byte[] data)
8386          throws com.google.protobuf.InvalidProtocolBufferException {
8387        return PARSER.parseFrom(data);
8388      }
8389      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink parseFrom(
8390          byte[] data,
8391          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
8392          throws com.google.protobuf.InvalidProtocolBufferException {
8393        return PARSER.parseFrom(data, extensionRegistry);
8394      }
8395      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink parseFrom(java.io.InputStream input)
8396          throws java.io.IOException {
8397        return PARSER.parseFrom(input);
8398      }
8399      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink parseFrom(
8400          java.io.InputStream input,
8401          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
8402          throws java.io.IOException {
8403        return PARSER.parseFrom(input, extensionRegistry);
8404      }
8405      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink parseDelimitedFrom(java.io.InputStream input)
8406          throws java.io.IOException {
8407        return PARSER.parseDelimitedFrom(input);
8408      }
8409      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink parseDelimitedFrom(
8410          java.io.InputStream input,
8411          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
8412          throws java.io.IOException {
8413        return PARSER.parseDelimitedFrom(input, extensionRegistry);
8414      }
8415      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink parseFrom(
8416          com.google.protobuf.CodedInputStream input)
8417          throws java.io.IOException {
8418        return PARSER.parseFrom(input);
8419      }
8420      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink parseFrom(
8421          com.google.protobuf.CodedInputStream input,
8422          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
8423          throws java.io.IOException {
8424        return PARSER.parseFrom(input, extensionRegistry);
8425      }
8426
8427      public static Builder newBuilder() { return Builder.create(); }
8428      public Builder newBuilderForType() { return newBuilder(); }
8429      public static Builder newBuilder(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink prototype) {
8430        return newBuilder().mergeFrom(prototype);
8431      }
8432      public Builder toBuilder() { return newBuilder(this); }
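      // Illustrative round trip through the generated API (the field values
      // here are hypothetical, not taken from a real image):
      //
      //   INodeSymlink link = INodeSymlink.newBuilder()
      //       .setPermission(0x1ffL)
      //       .setTarget(com.google.protobuf.ByteString.copyFromUtf8("/data/current"))
      //       .setModificationTime(1400000000000L)
      //       .build();
      //   INodeSymlink parsed = INodeSymlink.parseFrom(link.toByteString());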
8433
8434      @java.lang.Override
8435      protected Builder newBuilderForType(
8436          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
8437        Builder builder = new Builder(parent);
8438        return builder;
8439      }
8440      /**
8441       * Protobuf type {@code hadoop.hdfs.fsimage.INodeSection.INodeSymlink}
8442       */
8443      public static final class Builder extends
8444          com.google.protobuf.GeneratedMessage.Builder<Builder>
8445         implements org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlinkOrBuilder {
8446        public static final com.google.protobuf.Descriptors.Descriptor
8447            getDescriptor() {
8448          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_INodeSymlink_descriptor;
8449        }
8450
8451        protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
8452            internalGetFieldAccessorTable() {
8453          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_INodeSymlink_fieldAccessorTable
8454              .ensureFieldAccessorsInitialized(
8455                  org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink.Builder.class);
8456        }
8457
8458        // Construct using org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink.newBuilder()
8459        private Builder() {
8460          maybeForceBuilderInitialization();
8461        }
8462
8463        private Builder(
8464            com.google.protobuf.GeneratedMessage.BuilderParent parent) {
8465          super(parent);
8466          maybeForceBuilderInitialization();
8467        }
8468        private void maybeForceBuilderInitialization() {
8469          if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
8470          }
8471        }
8472        private static Builder create() {
8473          return new Builder();
8474        }
8475
8476        public Builder clear() {
8477          super.clear();
8478          permission_ = 0L;
8479          bitField0_ = (bitField0_ & ~0x00000001);
8480          target_ = com.google.protobuf.ByteString.EMPTY;
8481          bitField0_ = (bitField0_ & ~0x00000002);
8482          modificationTime_ = 0L;
8483          bitField0_ = (bitField0_ & ~0x00000004);
8484          accessTime_ = 0L;
8485          bitField0_ = (bitField0_ & ~0x00000008);
8486          return this;
8487        }
8488
8489        public Builder clone() {
8490          return create().mergeFrom(buildPartial());
8491        }
8492
8493        public com.google.protobuf.Descriptors.Descriptor
8494            getDescriptorForType() {
8495          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_INodeSymlink_descriptor;
8496        }
8497
8498        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink getDefaultInstanceForType() {
8499          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink.getDefaultInstance();
8500        }
8501
8502        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink build() {
8503          org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink result = buildPartial();
8504          if (!result.isInitialized()) {
8505            throw newUninitializedMessageException(result);
8506          }
8507          return result;
8508        }
8509
8510        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink buildPartial() {
8511          org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink result = new org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink(this);
8512          int from_bitField0_ = bitField0_;
8513          int to_bitField0_ = 0;
8514          if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
8515            to_bitField0_ |= 0x00000001;
8516          }
8517          result.permission_ = permission_;
8518          if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
8519            to_bitField0_ |= 0x00000002;
8520          }
8521          result.target_ = target_;
8522          if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
8523            to_bitField0_ |= 0x00000004;
8524          }
8525          result.modificationTime_ = modificationTime_;
8526          if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
8527            to_bitField0_ |= 0x00000008;
8528          }
8529          result.accessTime_ = accessTime_;
8530          result.bitField0_ = to_bitField0_;
8531          onBuilt();
8532          return result;
8533        }
8534
8535        public Builder mergeFrom(com.google.protobuf.Message other) {
8536          if (other instanceof org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink) {
8537            return mergeFrom((org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink)other);
8538          } else {
8539            super.mergeFrom(other);
8540            return this;
8541          }
8542        }
8543
8544        public Builder mergeFrom(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink other) {
8545          if (other == org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink.getDefaultInstance()) return this;
8546          if (other.hasPermission()) {
8547            setPermission(other.getPermission());
8548          }
8549          if (other.hasTarget()) {
8550            setTarget(other.getTarget());
8551          }
8552          if (other.hasModificationTime()) {
8553            setModificationTime(other.getModificationTime());
8554          }
8555          if (other.hasAccessTime()) {
8556            setAccessTime(other.getAccessTime());
8557          }
8558          this.mergeUnknownFields(other.getUnknownFields());
8559          return this;
8560        }
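        // Merge semantics: only fields that are set on `other` overwrite this
        // builder's values; unknown fields from both sides are merged.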
8561
8562        public final boolean isInitialized() {
8563          return true;
8564        }
8565
8566        public Builder mergeFrom(
8567            com.google.protobuf.CodedInputStream input,
8568            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
8569            throws java.io.IOException {
8570          org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink parsedMessage = null;
8571          try {
8572            parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
8573          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
8574            parsedMessage = (org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink) e.getUnfinishedMessage();
8575            throw e;
8576          } finally {
8577            if (parsedMessage != null) {
8578              mergeFrom(parsedMessage);
8579            }
8580          }
8581          return this;
8582        }
8583        private int bitField0_;
8584
8585        // optional fixed64 permission = 1;
8586        private long permission_;
8587        /**
8588         * <code>optional fixed64 permission = 1;</code>
8589         */
8590        public boolean hasPermission() {
8591          return ((bitField0_ & 0x00000001) == 0x00000001);
8592        }
8593        /**
8594         * <code>optional fixed64 permission = 1;</code>
8595         */
8596        public long getPermission() {
8597          return permission_;
8598        }
8599        /**
8600         * <code>optional fixed64 permission = 1;</code>
8601         */
8602        public Builder setPermission(long value) {
8603          bitField0_ |= 0x00000001;
8604          permission_ = value;
8605          onChanged();
8606          return this;
8607        }
8608        /**
8609         * <code>optional fixed64 permission = 1;</code>
8610         */
8611        public Builder clearPermission() {
8612          bitField0_ = (bitField0_ & ~0x00000001);
8613          permission_ = 0L;
8614          onChanged();
8615          return this;
8616        }
8617
8618        // optional bytes target = 2;
8619        private com.google.protobuf.ByteString target_ = com.google.protobuf.ByteString.EMPTY;
8620        /**
8621         * <code>optional bytes target = 2;</code>
8622         */
8623        public boolean hasTarget() {
8624          return ((bitField0_ & 0x00000002) == 0x00000002);
8625        }
8626        /**
8627         * <code>optional bytes target = 2;</code>
8628         */
8629        public com.google.protobuf.ByteString getTarget() {
8630          return target_;
8631        }
8632        /**
8633         * <code>optional bytes target = 2;</code>
8634         */
8635        public Builder setTarget(com.google.protobuf.ByteString value) {
8636          if (value == null) {
8637            throw new NullPointerException();
8638          }
8639          bitField0_ |= 0x00000002;
8640          target_ = value;
8641          onChanged();
8642          return this;
8643        }
8644        /**
8645         * <code>optional bytes target = 2;</code>
8646         */
8647        public Builder clearTarget() {
8648          bitField0_ = (bitField0_ & ~0x00000002);
8649          target_ = getDefaultInstance().getTarget();
8650          onChanged();
8651          return this;
8652        }
8653
8654        // optional uint64 modificationTime = 3;
8655        private long modificationTime_;
8656        /**
8657         * <code>optional uint64 modificationTime = 3;</code>
8658         */
8659        public boolean hasModificationTime() {
8660          return ((bitField0_ & 0x00000004) == 0x00000004);
8661        }
8662        /**
8663         * <code>optional uint64 modificationTime = 3;</code>
8664         */
8665        public long getModificationTime() {
8666          return modificationTime_;
8667        }
8668        /**
8669         * <code>optional uint64 modificationTime = 3;</code>
8670         */
8671        public Builder setModificationTime(long value) {
8672          bitField0_ |= 0x00000004;
8673          modificationTime_ = value;
8674          onChanged();
8675          return this;
8676        }
8677        /**
8678         * <code>optional uint64 modificationTime = 3;</code>
8679         */
8680        public Builder clearModificationTime() {
8681          bitField0_ = (bitField0_ & ~0x00000004);
8682          modificationTime_ = 0L;
8683          onChanged();
8684          return this;
8685        }
8686
8687        // optional uint64 accessTime = 4;
8688        private long accessTime_;
8689        /**
8690         * <code>optional uint64 accessTime = 4;</code>
8691         */
8692        public boolean hasAccessTime() {
8693          return ((bitField0_ & 0x00000008) == 0x00000008);
8694        }
8695        /**
8696         * <code>optional uint64 accessTime = 4;</code>
8697         */
8698        public long getAccessTime() {
8699          return accessTime_;
8700        }
8701        /**
8702         * <code>optional uint64 accessTime = 4;</code>
8703         */
8704        public Builder setAccessTime(long value) {
8705          bitField0_ |= 0x00000008;
8706          accessTime_ = value;
8707          onChanged();
8708          return this;
8709        }
8710        /**
8711         * <code>optional uint64 accessTime = 4;</code>
8712         */
8713        public Builder clearAccessTime() {
8714          bitField0_ = (bitField0_ & ~0x00000008);
8715          accessTime_ = 0L;
8716          onChanged();
8717          return this;
8718        }
8719
8720        // @@protoc_insertion_point(builder_scope:hadoop.hdfs.fsimage.INodeSection.INodeSymlink)
8721      }
8722
8723      static {
8724        defaultInstance = new INodeSymlink(true);
8725        defaultInstance.initFields();
8726      }
8727
8728      // @@protoc_insertion_point(class_scope:hadoop.hdfs.fsimage.INodeSection.INodeSymlink)
8729    }
8730
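    // INode is effectively a tagged union: the required `type` selects which of
    // the optional file/directory/symlink sub-messages carries the payload,
    // `id` is the inode number, and `name` is the inode's local name bytes.
    // Writers are expected to set exactly the sub-message matching `type`,
    // though the format itself does not enforce that.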
8731    public interface INodeOrBuilder
8732        extends com.google.protobuf.MessageOrBuilder {
8733
8734      // required .hadoop.hdfs.fsimage.INodeSection.INode.Type type = 1;
8735      /**
8736       * <code>required .hadoop.hdfs.fsimage.INodeSection.INode.Type type = 1;</code>
8737       */
8738      boolean hasType();
8739      /**
8740       * <code>required .hadoop.hdfs.fsimage.INodeSection.INode.Type type = 1;</code>
8741       */
8742      org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode.Type getType();
8743
8744      // required uint64 id = 2;
8745      /**
8746       * <code>required uint64 id = 2;</code>
8747       */
8748      boolean hasId();
8749      /**
8750       * <code>required uint64 id = 2;</code>
8751       */
8752      long getId();
8753
8754      // optional bytes name = 3;
8755      /**
8756       * <code>optional bytes name = 3;</code>
8757       */
8758      boolean hasName();
8759      /**
8760       * <code>optional bytes name = 3;</code>
8761       */
8762      com.google.protobuf.ByteString getName();
8763
8764      // optional .hadoop.hdfs.fsimage.INodeSection.INodeFile file = 4;
8765      /**
8766       * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeFile file = 4;</code>
8767       */
8768      boolean hasFile();
8769      /**
8770       * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeFile file = 4;</code>
8771       */
8772      org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile getFile();
8773      /**
8774       * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeFile file = 4;</code>
8775       */
8776      org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFileOrBuilder getFileOrBuilder();
8777
8778      // optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory directory = 5;
8779      /**
8780       * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory directory = 5;</code>
8781       */
8782      boolean hasDirectory();
8783      /**
8784       * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory directory = 5;</code>
8785       */
8786      org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory getDirectory();
8787      /**
8788       * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory directory = 5;</code>
8789       */
8790      org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectoryOrBuilder getDirectoryOrBuilder();
8791
8792      // optional .hadoop.hdfs.fsimage.INodeSection.INodeSymlink symlink = 6;
8793      /**
8794       * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeSymlink symlink = 6;</code>
8795       */
8796      boolean hasSymlink();
8797      /**
8798       * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeSymlink symlink = 6;</code>
8799       */
8800      org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink getSymlink();
8801      /**
8802       * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeSymlink symlink = 6;</code>
8803       */
8804      org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlinkOrBuilder getSymlinkOrBuilder();
8805    }
8806    /**
8807     * Protobuf type {@code hadoop.hdfs.fsimage.INodeSection.INode}
8808     */
8809    public static final class INode extends
8810        com.google.protobuf.GeneratedMessage
8811        implements INodeOrBuilder {
8812      // Use INode.newBuilder() to construct.
8813      private INode(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
8814        super(builder);
8815        this.unknownFields = builder.getUnknownFields();
8816      }
8817      private INode(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
8818
8819      private static final INode defaultInstance;
8820      public static INode getDefaultInstance() {
8821        return defaultInstance;
8822      }
8823
8824      public INode getDefaultInstanceForType() {
8825        return defaultInstance;
8826      }
8827
8828      private final com.google.protobuf.UnknownFieldSet unknownFields;
8829      @java.lang.Override
8830      public final com.google.protobuf.UnknownFieldSet
8831          getUnknownFields() {
8832        return this.unknownFields;
8833      }
8834      private INode(
8835          com.google.protobuf.CodedInputStream input,
8836          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
8837          throws com.google.protobuf.InvalidProtocolBufferException {
8838        initFields();
8839        int mutable_bitField0_ = 0;
8840        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
8841            com.google.protobuf.UnknownFieldSet.newBuilder();
8842        try {
8843          boolean done = false;
8844          while (!done) {
8845            int tag = input.readTag();
8846            switch (tag) {
8847              case 0:
8848                done = true;
8849                break;
8850              default: {
8851                if (!parseUnknownField(input, unknownFields,
8852                                       extensionRegistry, tag)) {
8853                  done = true;
8854                }
8855                break;
8856              }
8857              case 8: {
8858                int rawValue = input.readEnum();
8859                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode.Type value = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode.Type.valueOf(rawValue);
8860                if (value == null) {
8861                  unknownFields.mergeVarintField(1, rawValue);
8862                } else {
8863                  bitField0_ |= 0x00000001;
8864                  type_ = value;
8865                }
8866                break;
8867              }
8868              case 16: {
8869                bitField0_ |= 0x00000002;
8870                id_ = input.readUInt64();
8871                break;
8872              }
8873              case 26: {
8874                bitField0_ |= 0x00000004;
8875                name_ = input.readBytes();
8876                break;
8877              }
8878              case 34: {
8879                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile.Builder subBuilder = null;
8880                if (((bitField0_ & 0x00000008) == 0x00000008)) {
8881                  subBuilder = file_.toBuilder();
8882                }
8883                file_ = input.readMessage(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile.PARSER, extensionRegistry);
8884                if (subBuilder != null) {
8885                  subBuilder.mergeFrom(file_);
8886                  file_ = subBuilder.buildPartial();
8887                }
8888                bitField0_ |= 0x00000008;
8889                break;
8890              }
8891              case 42: {
8892                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory.Builder subBuilder = null;
8893                if (((bitField0_ & 0x00000010) == 0x00000010)) {
8894                  subBuilder = directory_.toBuilder();
8895                }
8896                directory_ = input.readMessage(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory.PARSER, extensionRegistry);
8897                if (subBuilder != null) {
8898                  subBuilder.mergeFrom(directory_);
8899                  directory_ = subBuilder.buildPartial();
8900                }
8901                bitField0_ |= 0x00000010;
8902                break;
8903              }
8904              case 50: {
8905                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink.Builder subBuilder = null;
8906                if (((bitField0_ & 0x00000020) == 0x00000020)) {
8907                  subBuilder = symlink_.toBuilder();
8908                }
8909                symlink_ = input.readMessage(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink.PARSER, extensionRegistry);
8910                if (subBuilder != null) {
8911                  subBuilder.mergeFrom(symlink_);
8912                  symlink_ = subBuilder.buildPartial();
8913                }
8914                bitField0_ |= 0x00000020;
8915                break;
8916              }
8917            }
8918          }
8919        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
8920          throw e.setUnfinishedMessage(this);
8921        } catch (java.io.IOException e) {
8922          throw new com.google.protobuf.InvalidProtocolBufferException(
8923              e.getMessage()).setUnfinishedMessage(this);
8924        } finally {
8925          this.unknownFields = unknownFields.build();
8926          makeExtensionsImmutable();
8927        }
8928      }
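      // Note the duplicate-field handling above: if an optional sub-message
      // (file/directory/symlink) appears more than once on the wire, the
      // previous value is converted back to a builder and the new bytes are
      // merged into it, matching protobuf's merge semantics for repeated
      // occurrences of an embedded-message field.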
8929      public static final com.google.protobuf.Descriptors.Descriptor
8930          getDescriptor() {
8931        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_INode_descriptor;
8932      }
8933
8934      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
8935          internalGetFieldAccessorTable() {
8936        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_INode_fieldAccessorTable
8937            .ensureFieldAccessorsInitialized(
8938                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode.Builder.class);
8939      }
8940
8941      public static com.google.protobuf.Parser<INode> PARSER =
8942          new com.google.protobuf.AbstractParser<INode>() {
8943        public INode parsePartialFrom(
8944            com.google.protobuf.CodedInputStream input,
8945            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
8946            throws com.google.protobuf.InvalidProtocolBufferException {
8947          return new INode(input, extensionRegistry);
8948        }
8949      };
8950
8951      @java.lang.Override
8952      public com.google.protobuf.Parser<INode> getParserForType() {
8953        return PARSER;
8954      }
8955
8956      /**
8957       * Protobuf enum {@code hadoop.hdfs.fsimage.INodeSection.INode.Type}
8958       */
8959      public enum Type
8960          implements com.google.protobuf.ProtocolMessageEnum {
8961        /**
8962         * <code>FILE = 1;</code>
8963         */
8964        FILE(0, 1),
8965        /**
8966         * <code>DIRECTORY = 2;</code>
8967         */
8968        DIRECTORY(1, 2),
8969        /**
8970         * <code>SYMLINK = 3;</code>
8971         */
8972        SYMLINK(2, 3),
8973        ;
8974
8975        /**
8976         * <code>FILE = 1;</code>
8977         */
8978        public static final int FILE_VALUE = 1;
8979        /**
8980         * <code>DIRECTORY = 2;</code>
8981         */
8982        public static final int DIRECTORY_VALUE = 2;
8983        /**
8984         * <code>SYMLINK = 3;</code>
8985         */
8986        public static final int SYMLINK_VALUE = 3;
8987
8988
8989        public final int getNumber() { return value; }
8990
8991        public static Type valueOf(int value) {
8992          switch (value) {
8993            case 1: return FILE;
8994            case 2: return DIRECTORY;
8995            case 3: return SYMLINK;
8996            default: return null;
8997          }
8998        }
8999
9000        public static com.google.protobuf.Internal.EnumLiteMap<Type>
9001            internalGetValueMap() {
9002          return internalValueMap;
9003        }
9004        private static com.google.protobuf.Internal.EnumLiteMap<Type>
9005            internalValueMap =
9006              new com.google.protobuf.Internal.EnumLiteMap<Type>() {
9007                public Type findValueByNumber(int number) {
9008                  return Type.valueOf(number);
9009                }
9010              };
9011
9012        public final com.google.protobuf.Descriptors.EnumValueDescriptor
9013            getValueDescriptor() {
9014          return getDescriptor().getValues().get(index);
9015        }
9016        public final com.google.protobuf.Descriptors.EnumDescriptor
9017            getDescriptorForType() {
9018          return getDescriptor();
9019        }
9020        public static final com.google.protobuf.Descriptors.EnumDescriptor
9021            getDescriptor() {
9022          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode.getDescriptor().getEnumTypes().get(0);
9023        }
9024
9025        private static final Type[] VALUES = values();
9026
9027        public static Type valueOf(
9028            com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
9029          if (desc.getType() != getDescriptor()) {
9030            throw new java.lang.IllegalArgumentException(
9031              "EnumValueDescriptor is not for this type.");
9032          }
9033          return VALUES[desc.getIndex()];
9034        }
9035
9036        private final int index;
9037        private final int value;
9038
9039        private Type(int index, int value) {
9040          this.index = index;
9041          this.value = value;
9042        }
9043
9044        // @@protoc_insertion_point(enum_scope:hadoop.hdfs.fsimage.INodeSection.INode.Type)
9045      }
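      // Each enum constant above carries (index, value): `index` is its
      // position in the descriptor's value list (used by getValueDescriptor),
      // while `value` is the wire number declared in fsimage.proto
      // (FILE = 1, DIRECTORY = 2, SYMLINK = 3).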
9046
9047      private int bitField0_;
9048      // required .hadoop.hdfs.fsimage.INodeSection.INode.Type type = 1;
9049      public static final int TYPE_FIELD_NUMBER = 1;
9050      private org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode.Type type_;
9051      /**
9052       * <code>required .hadoop.hdfs.fsimage.INodeSection.INode.Type type = 1;</code>
9053       */
9054      public boolean hasType() {
9055        return ((bitField0_ & 0x00000001) == 0x00000001);
9056      }
9057      /**
9058       * <code>required .hadoop.hdfs.fsimage.INodeSection.INode.Type type = 1;</code>
9059       */
9060      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode.Type getType() {
9061        return type_;
9062      }
9063
9064      // required uint64 id = 2;
9065      public static final int ID_FIELD_NUMBER = 2;
9066      private long id_;
9067      /**
9068       * <code>required uint64 id = 2;</code>
9069       */
9070      public boolean hasId() {
9071        return ((bitField0_ & 0x00000002) == 0x00000002);
9072      }
9073      /**
9074       * <code>required uint64 id = 2;</code>
9075       */
9076      public long getId() {
9077        return id_;
9078      }
9079
9080      // optional bytes name = 3;
9081      public static final int NAME_FIELD_NUMBER = 3;
9082      private com.google.protobuf.ByteString name_;
9083      /**
9084       * <code>optional bytes name = 3;</code>
9085       */
9086      public boolean hasName() {
9087        return ((bitField0_ & 0x00000004) == 0x00000004);
9088      }
9089      /**
9090       * <code>optional bytes name = 3;</code>
9091       */
9092      public com.google.protobuf.ByteString getName() {
9093        return name_;
9094      }
9095
9096      // optional .hadoop.hdfs.fsimage.INodeSection.INodeFile file = 4;
9097      public static final int FILE_FIELD_NUMBER = 4;
9098      private org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile file_;
9099      /**
9100       * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeFile file = 4;</code>
9101       */
9102      public boolean hasFile() {
9103        return ((bitField0_ & 0x00000008) == 0x00000008);
9104      }
9105      /**
9106       * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeFile file = 4;</code>
9107       */
9108      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile getFile() {
9109        return file_;
9110      }
9111      /**
9112       * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeFile file = 4;</code>
9113       */
9114      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFileOrBuilder getFileOrBuilder() {
9115        return file_;
9116      }
9117
9118      // optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory directory = 5;
9119      public static final int DIRECTORY_FIELD_NUMBER = 5;
9120      private org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory directory_;
9121      /**
9122       * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory directory = 5;</code>
9123       */
9124      public boolean hasDirectory() {
9125        return ((bitField0_ & 0x00000010) == 0x00000010);
9126      }
9127      /**
9128       * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory directory = 5;</code>
9129       */
9130      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory getDirectory() {
9131        return directory_;
9132      }
9133      /**
9134       * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory directory = 5;</code>
9135       */
9136      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectoryOrBuilder getDirectoryOrBuilder() {
9137        return directory_;
9138      }
9139
9140      // optional .hadoop.hdfs.fsimage.INodeSection.INodeSymlink symlink = 6;
9141      public static final int SYMLINK_FIELD_NUMBER = 6;
9142      private org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink symlink_;
9143      /**
9144       * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeSymlink symlink = 6;</code>
9145       */
9146      public boolean hasSymlink() {
9147        return ((bitField0_ & 0x00000020) == 0x00000020);
9148      }
9149      /**
9150       * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeSymlink symlink = 6;</code>
9151       */
9152      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink getSymlink() {
9153        return symlink_;
9154      }
9155      /**
9156       * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeSymlink symlink = 6;</code>
9157       */
9158      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlinkOrBuilder getSymlinkOrBuilder() {
9159        return symlink_;
9160      }
9161
9162      private void initFields() {
9163        type_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode.Type.FILE;
9164        id_ = 0L;
9165        name_ = com.google.protobuf.ByteString.EMPTY;
9166        file_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile.getDefaultInstance();
9167        directory_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory.getDefaultInstance();
9168        symlink_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink.getDefaultInstance();
9169      }
9170      private byte memoizedIsInitialized = -1;
9171      public final boolean isInitialized() {
9172        byte isInitialized = memoizedIsInitialized;
9173        if (isInitialized != -1) return isInitialized == 1;
9174
9175        if (!hasType()) {
9176          memoizedIsInitialized = 0;
9177          return false;
9178        }
9179        if (!hasId()) {
9180          memoizedIsInitialized = 0;
9181          return false;
9182        }
9183        if (hasFile()) {
9184          if (!getFile().isInitialized()) {
9185            memoizedIsInitialized = 0;
9186            return false;
9187          }
9188        }
9189        if (hasDirectory()) {
9190          if (!getDirectory().isInitialized()) {
9191            memoizedIsInitialized = 0;
9192            return false;
9193          }
9194        }
9195        memoizedIsInitialized = 1;
9196        return true;
9197      }
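      // isInitialized() enforces the two required fields (type, id) and then
      // recursively validates file/directory when present; symlink is skipped
      // because INodeSymlink declares no required fields of its own.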
9198
9199      public void writeTo(com.google.protobuf.CodedOutputStream output)
9200                          throws java.io.IOException {
9201        getSerializedSize();
9202        if (((bitField0_ & 0x00000001) == 0x00000001)) {
9203          output.writeEnum(1, type_.getNumber());
9204        }
9205        if (((bitField0_ & 0x00000002) == 0x00000002)) {
9206          output.writeUInt64(2, id_);
9207        }
9208        if (((bitField0_ & 0x00000004) == 0x00000004)) {
9209          output.writeBytes(3, name_);
9210        }
9211        if (((bitField0_ & 0x00000008) == 0x00000008)) {
9212          output.writeMessage(4, file_);
9213        }
9214        if (((bitField0_ & 0x00000010) == 0x00000010)) {
9215          output.writeMessage(5, directory_);
9216        }
9217        if (((bitField0_ & 0x00000020) == 0x00000020)) {
9218          output.writeMessage(6, symlink_);
9219        }
9220        getUnknownFields().writeTo(output);
9221      }
9222
9223      private int memoizedSerializedSize = -1;
9224      public int getSerializedSize() {
9225        int size = memoizedSerializedSize;
9226        if (size != -1) return size;
9227
9228        size = 0;
9229        if (((bitField0_ & 0x00000001) == 0x00000001)) {
9230          size += com.google.protobuf.CodedOutputStream
9231            .computeEnumSize(1, type_.getNumber());
9232        }
9233        if (((bitField0_ & 0x00000002) == 0x00000002)) {
9234          size += com.google.protobuf.CodedOutputStream
9235            .computeUInt64Size(2, id_);
9236        }
9237        if (((bitField0_ & 0x00000004) == 0x00000004)) {
9238          size += com.google.protobuf.CodedOutputStream
9239            .computeBytesSize(3, name_);
9240        }
9241        if (((bitField0_ & 0x00000008) == 0x00000008)) {
9242          size += com.google.protobuf.CodedOutputStream
9243            .computeMessageSize(4, file_);
9244        }
9245        if (((bitField0_ & 0x00000010) == 0x00000010)) {
9246          size += com.google.protobuf.CodedOutputStream
9247            .computeMessageSize(5, directory_);
9248        }
9249        if (((bitField0_ & 0x00000020) == 0x00000020)) {
9250          size += com.google.protobuf.CodedOutputStream
9251            .computeMessageSize(6, symlink_);
9252        }
9253        size += getUnknownFields().getSerializedSize();
9254        memoizedSerializedSize = size;
9255        return size;
9256      }
9257
9258      private static final long serialVersionUID = 0L;
9259      @java.lang.Override
9260      protected java.lang.Object writeReplace()
9261          throws java.io.ObjectStreamException {
9262        return super.writeReplace();
9263      }
9264
9265      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode parseFrom(
9266          com.google.protobuf.ByteString data)
9267          throws com.google.protobuf.InvalidProtocolBufferException {
9268        return PARSER.parseFrom(data);
9269      }
9270      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode parseFrom(
9271          com.google.protobuf.ByteString data,
9272          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
9273          throws com.google.protobuf.InvalidProtocolBufferException {
9274        return PARSER.parseFrom(data, extensionRegistry);
9275      }
9276      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode parseFrom(byte[] data)
9277          throws com.google.protobuf.InvalidProtocolBufferException {
9278        return PARSER.parseFrom(data);
9279      }
9280      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode parseFrom(
9281          byte[] data,
9282          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
9283          throws com.google.protobuf.InvalidProtocolBufferException {
9284        return PARSER.parseFrom(data, extensionRegistry);
9285      }
9286      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode parseFrom(java.io.InputStream input)
9287          throws java.io.IOException {
9288        return PARSER.parseFrom(input);
9289      }
9290      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode parseFrom(
9291          java.io.InputStream input,
9292          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
9293          throws java.io.IOException {
9294        return PARSER.parseFrom(input, extensionRegistry);
9295      }
9296      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode parseDelimitedFrom(java.io.InputStream input)
9297          throws java.io.IOException {
9298        return PARSER.parseDelimitedFrom(input);
9299      }
9300      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode parseDelimitedFrom(
9301          java.io.InputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseDelimitedFrom(input, extensionRegistry);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode parseFrom(
          com.google.protobuf.CodedInputStream input)
          throws java.io.IOException {
        return PARSER.parseFrom(input);
      }
      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode parseFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseFrom(input, extensionRegistry);
      }

      public static Builder newBuilder() { return Builder.create(); }
      public Builder newBuilderForType() { return newBuilder(); }
      public static Builder newBuilder(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode prototype) {
        return newBuilder().mergeFrom(prototype);
      }
      public Builder toBuilder() { return newBuilder(this); }

      @java.lang.Override
      protected Builder newBuilderForType(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        Builder builder = new Builder(parent);
        return builder;
      }
      /**
       * Protobuf type {@code hadoop.hdfs.fsimage.INodeSection.INode}
       */
      public static final class Builder extends
          com.google.protobuf.GeneratedMessage.Builder<Builder>
         implements org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeOrBuilder {
        public static final com.google.protobuf.Descriptors.Descriptor
            getDescriptor() {
          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_INode_descriptor;
        }

        protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
            internalGetFieldAccessorTable() {
          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_INode_fieldAccessorTable
              .ensureFieldAccessorsInitialized(
                  org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode.Builder.class);
        }

        // Construct using org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode.newBuilder()
        private Builder() {
          maybeForceBuilderInitialization();
        }

        private Builder(
            com.google.protobuf.GeneratedMessage.BuilderParent parent) {
          super(parent);
          maybeForceBuilderInitialization();
        }
        private void maybeForceBuilderInitialization() {
          if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
            getFileFieldBuilder();
            getDirectoryFieldBuilder();
            getSymlinkFieldBuilder();
          }
        }
        private static Builder create() {
          return new Builder();
        }

        public Builder clear() {
          super.clear();
          type_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode.Type.FILE;
          bitField0_ = (bitField0_ & ~0x00000001);
          id_ = 0L;
          bitField0_ = (bitField0_ & ~0x00000002);
          name_ = com.google.protobuf.ByteString.EMPTY;
          bitField0_ = (bitField0_ & ~0x00000004);
          if (fileBuilder_ == null) {
            file_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile.getDefaultInstance();
          } else {
            fileBuilder_.clear();
          }
          bitField0_ = (bitField0_ & ~0x00000008);
          if (directoryBuilder_ == null) {
            directory_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory.getDefaultInstance();
          } else {
            directoryBuilder_.clear();
          }
          bitField0_ = (bitField0_ & ~0x00000010);
          if (symlinkBuilder_ == null) {
            symlink_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink.getDefaultInstance();
          } else {
            symlinkBuilder_.clear();
          }
          bitField0_ = (bitField0_ & ~0x00000020);
          return this;
        }

        public Builder clone() {
          return create().mergeFrom(buildPartial());
        }

        public com.google.protobuf.Descriptors.Descriptor
            getDescriptorForType() {
          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_INode_descriptor;
        }

        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode getDefaultInstanceForType() {
          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode.getDefaultInstance();
        }

        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode build() {
          org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode result = buildPartial();
          if (!result.isInitialized()) {
            throw newUninitializedMessageException(result);
          }
          return result;
        }

        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode buildPartial() {
          org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode result = new org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode(this);
          int from_bitField0_ = bitField0_;
          int to_bitField0_ = 0;
          if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
            to_bitField0_ |= 0x00000001;
          }
          result.type_ = type_;
          if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
            to_bitField0_ |= 0x00000002;
          }
          result.id_ = id_;
          if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
            to_bitField0_ |= 0x00000004;
          }
          result.name_ = name_;
          if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
            to_bitField0_ |= 0x00000008;
          }
          if (fileBuilder_ == null) {
            result.file_ = file_;
          } else {
            result.file_ = fileBuilder_.build();
          }
          if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
            to_bitField0_ |= 0x00000010;
          }
          if (directoryBuilder_ == null) {
            result.directory_ = directory_;
          } else {
            result.directory_ = directoryBuilder_.build();
          }
          if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
            to_bitField0_ |= 0x00000020;
          }
          if (symlinkBuilder_ == null) {
            result.symlink_ = symlink_;
          } else {
            result.symlink_ = symlinkBuilder_.build();
          }
          result.bitField0_ = to_bitField0_;
          onBuilt();
          return result;
        }

        public Builder mergeFrom(com.google.protobuf.Message other) {
          if (other instanceof org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode) {
            return mergeFrom((org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode)other);
          } else {
            super.mergeFrom(other);
            return this;
          }
        }

        public Builder mergeFrom(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode other) {
          if (other == org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode.getDefaultInstance()) return this;
          if (other.hasType()) {
            setType(other.getType());
          }
          if (other.hasId()) {
            setId(other.getId());
          }
          if (other.hasName()) {
            setName(other.getName());
          }
          if (other.hasFile()) {
            mergeFile(other.getFile());
          }
          if (other.hasDirectory()) {
            mergeDirectory(other.getDirectory());
          }
          if (other.hasSymlink()) {
            mergeSymlink(other.getSymlink());
          }
          this.mergeUnknownFields(other.getUnknownFields());
          return this;
        }

        public final boolean isInitialized() {
          if (!hasType()) {
            return false;
          }
          if (!hasId()) {
            return false;
          }
          if (hasFile()) {
            if (!getFile().isInitialized()) {
              return false;
            }
          }
          if (hasDirectory()) {
            if (!getDirectory().isInitialized()) {
              return false;
            }
          }
          return true;
        }

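        // Editor's note: the stream-based mergeFrom below deliberately merges any
        // partially parsed message (recovered via getUnfinishedMessage()) before
        // rethrowing, so fields decoded before a parse failure are not lost.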
        public Builder mergeFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws java.io.IOException {
          org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode parsedMessage = null;
          try {
            parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            parsedMessage = (org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode) e.getUnfinishedMessage();
            throw e;
          } finally {
            if (parsedMessage != null) {
              mergeFrom(parsedMessage);
            }
          }
          return this;
        }
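        // Editor's note: bitField0_ records explicit field presence, one bit per
        // field in declaration order: 0x01 = type, 0x02 = id, 0x04 = name,
        // 0x08 = file, 0x10 = directory, 0x20 = symlink.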
        private int bitField0_;

        // required .hadoop.hdfs.fsimage.INodeSection.INode.Type type = 1;
        private org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode.Type type_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode.Type.FILE;
        /**
         * <code>required .hadoop.hdfs.fsimage.INodeSection.INode.Type type = 1;</code>
         */
        public boolean hasType() {
          return ((bitField0_ & 0x00000001) == 0x00000001);
        }
        /**
         * <code>required .hadoop.hdfs.fsimage.INodeSection.INode.Type type = 1;</code>
         */
        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode.Type getType() {
          return type_;
        }
        /**
         * <code>required .hadoop.hdfs.fsimage.INodeSection.INode.Type type = 1;</code>
         */
        public Builder setType(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode.Type value) {
          if (value == null) {
            throw new NullPointerException();
          }
          bitField0_ |= 0x00000001;
          type_ = value;
          onChanged();
          return this;
        }
        /**
         * <code>required .hadoop.hdfs.fsimage.INodeSection.INode.Type type = 1;</code>
         */
        public Builder clearType() {
          bitField0_ = (bitField0_ & ~0x00000001);
          type_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode.Type.FILE;
          onChanged();
          return this;
        }

        // required uint64 id = 2;
        private long id_ ;
        /**
         * <code>required uint64 id = 2;</code>
         */
        public boolean hasId() {
          return ((bitField0_ & 0x00000002) == 0x00000002);
        }
        /**
         * <code>required uint64 id = 2;</code>
         */
        public long getId() {
          return id_;
        }
        /**
         * <code>required uint64 id = 2;</code>
         */
        public Builder setId(long value) {
          bitField0_ |= 0x00000002;
          id_ = value;
          onChanged();
          return this;
        }
        /**
         * <code>required uint64 id = 2;</code>
         */
        public Builder clearId() {
          bitField0_ = (bitField0_ & ~0x00000002);
          id_ = 0L;
          onChanged();
          return this;
        }

        // optional bytes name = 3;
        private com.google.protobuf.ByteString name_ = com.google.protobuf.ByteString.EMPTY;
        /**
         * <code>optional bytes name = 3;</code>
         */
        public boolean hasName() {
          return ((bitField0_ & 0x00000004) == 0x00000004);
        }
        /**
         * <code>optional bytes name = 3;</code>
         */
        public com.google.protobuf.ByteString getName() {
          return name_;
        }
        /**
         * <code>optional bytes name = 3;</code>
         */
        public Builder setName(com.google.protobuf.ByteString value) {
          if (value == null) {
            throw new NullPointerException();
          }
          bitField0_ |= 0x00000004;
          name_ = value;
          onChanged();
          return this;
        }
        /**
         * <code>optional bytes name = 3;</code>
         */
        public Builder clearName() {
          bitField0_ = (bitField0_ & ~0x00000004);
          name_ = getDefaultInstance().getName();
          onChanged();
          return this;
        }

        // optional .hadoop.hdfs.fsimage.INodeSection.INodeFile file = 4;
        private org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile file_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile.getDefaultInstance();
        private com.google.protobuf.SingleFieldBuilder<
            org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFileOrBuilder> fileBuilder_;
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeFile file = 4;</code>
         */
        public boolean hasFile() {
          return ((bitField0_ & 0x00000008) == 0x00000008);
        }
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeFile file = 4;</code>
         */
        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile getFile() {
          if (fileBuilder_ == null) {
            return file_;
          } else {
            return fileBuilder_.getMessage();
          }
        }
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeFile file = 4;</code>
         */
        public Builder setFile(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile value) {
          if (fileBuilder_ == null) {
            if (value == null) {
              throw new NullPointerException();
            }
            file_ = value;
            onChanged();
          } else {
            fileBuilder_.setMessage(value);
          }
          bitField0_ |= 0x00000008;
          return this;
        }
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeFile file = 4;</code>
         */
        public Builder setFile(
            org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile.Builder builderForValue) {
          if (fileBuilder_ == null) {
            file_ = builderForValue.build();
            onChanged();
          } else {
            fileBuilder_.setMessage(builderForValue.build());
          }
          bitField0_ |= 0x00000008;
          return this;
        }
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeFile file = 4;</code>
         */
        public Builder mergeFile(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile value) {
          if (fileBuilder_ == null) {
            if (((bitField0_ & 0x00000008) == 0x00000008) &&
                file_ != org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile.getDefaultInstance()) {
              file_ =
                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile.newBuilder(file_).mergeFrom(value).buildPartial();
            } else {
              file_ = value;
            }
            onChanged();
          } else {
            fileBuilder_.mergeFrom(value);
          }
          bitField0_ |= 0x00000008;
          return this;
        }
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeFile file = 4;</code>
         */
        public Builder clearFile() {
          if (fileBuilder_ == null) {
            file_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile.getDefaultInstance();
            onChanged();
          } else {
            fileBuilder_.clear();
          }
          bitField0_ = (bitField0_ & ~0x00000008);
          return this;
        }
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeFile file = 4;</code>
         */
        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile.Builder getFileBuilder() {
          bitField0_ |= 0x00000008;
          onChanged();
          return getFileFieldBuilder().getBuilder();
        }
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeFile file = 4;</code>
         */
        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFileOrBuilder getFileOrBuilder() {
          if (fileBuilder_ != null) {
            return fileBuilder_.getMessageOrBuilder();
          } else {
            return file_;
          }
        }
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeFile file = 4;</code>
         */
        private com.google.protobuf.SingleFieldBuilder<
            org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFileOrBuilder> 
            getFileFieldBuilder() {
          if (fileBuilder_ == null) {
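            // Created lazily on first use: the current file_ message seeds the
            // nested builder, and file_ is then nulled so the builder becomes
            // the single source of truth for this field.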
            fileBuilder_ = new com.google.protobuf.SingleFieldBuilder<
                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFileOrBuilder>(
                    file_,
                    getParentForChildren(),
                    isClean());
            file_ = null;
          }
          return fileBuilder_;
        }

        // optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory directory = 5;
        private org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory directory_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory.getDefaultInstance();
        private com.google.protobuf.SingleFieldBuilder<
            org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectoryOrBuilder> directoryBuilder_;
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory directory = 5;</code>
         */
        public boolean hasDirectory() {
          return ((bitField0_ & 0x00000010) == 0x00000010);
        }
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory directory = 5;</code>
         */
        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory getDirectory() {
          if (directoryBuilder_ == null) {
            return directory_;
          } else {
            return directoryBuilder_.getMessage();
          }
        }
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory directory = 5;</code>
         */
        public Builder setDirectory(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory value) {
          if (directoryBuilder_ == null) {
            if (value == null) {
              throw new NullPointerException();
            }
            directory_ = value;
            onChanged();
          } else {
            directoryBuilder_.setMessage(value);
          }
          bitField0_ |= 0x00000010;
          return this;
        }
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory directory = 5;</code>
         */
        public Builder setDirectory(
            org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory.Builder builderForValue) {
          if (directoryBuilder_ == null) {
            directory_ = builderForValue.build();
            onChanged();
          } else {
            directoryBuilder_.setMessage(builderForValue.build());
          }
          bitField0_ |= 0x00000010;
          return this;
        }
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory directory = 5;</code>
         */
        public Builder mergeDirectory(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory value) {
          if (directoryBuilder_ == null) {
            if (((bitField0_ & 0x00000010) == 0x00000010) &&
                directory_ != org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory.getDefaultInstance()) {
              directory_ =
                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory.newBuilder(directory_).mergeFrom(value).buildPartial();
            } else {
              directory_ = value;
            }
            onChanged();
          } else {
            directoryBuilder_.mergeFrom(value);
          }
          bitField0_ |= 0x00000010;
          return this;
        }
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory directory = 5;</code>
         */
        public Builder clearDirectory() {
          if (directoryBuilder_ == null) {
            directory_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory.getDefaultInstance();
            onChanged();
          } else {
            directoryBuilder_.clear();
          }
          bitField0_ = (bitField0_ & ~0x00000010);
          return this;
        }
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory directory = 5;</code>
         */
        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory.Builder getDirectoryBuilder() {
          bitField0_ |= 0x00000010;
          onChanged();
          return getDirectoryFieldBuilder().getBuilder();
        }
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory directory = 5;</code>
         */
        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectoryOrBuilder getDirectoryOrBuilder() {
          if (directoryBuilder_ != null) {
            return directoryBuilder_.getMessageOrBuilder();
          } else {
            return directory_;
          }
        }
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory directory = 5;</code>
         */
        private com.google.protobuf.SingleFieldBuilder<
            org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectoryOrBuilder> 
            getDirectoryFieldBuilder() {
          if (directoryBuilder_ == null) {
            directoryBuilder_ = new com.google.protobuf.SingleFieldBuilder<
                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectoryOrBuilder>(
                    directory_,
                    getParentForChildren(),
                    isClean());
            directory_ = null;
          }
          return directoryBuilder_;
        }

        // optional .hadoop.hdfs.fsimage.INodeSection.INodeSymlink symlink = 6;
        private org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink symlink_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink.getDefaultInstance();
        private com.google.protobuf.SingleFieldBuilder<
            org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlinkOrBuilder> symlinkBuilder_;
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeSymlink symlink = 6;</code>
         */
        public boolean hasSymlink() {
          return ((bitField0_ & 0x00000020) == 0x00000020);
        }
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeSymlink symlink = 6;</code>
         */
        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink getSymlink() {
          if (symlinkBuilder_ == null) {
            return symlink_;
          } else {
            return symlinkBuilder_.getMessage();
          }
        }
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeSymlink symlink = 6;</code>
         */
        public Builder setSymlink(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink value) {
          if (symlinkBuilder_ == null) {
            if (value == null) {
              throw new NullPointerException();
            }
            symlink_ = value;
            onChanged();
          } else {
            symlinkBuilder_.setMessage(value);
          }
          bitField0_ |= 0x00000020;
          return this;
        }
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeSymlink symlink = 6;</code>
         */
        public Builder setSymlink(
            org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink.Builder builderForValue) {
          if (symlinkBuilder_ == null) {
            symlink_ = builderForValue.build();
            onChanged();
          } else {
            symlinkBuilder_.setMessage(builderForValue.build());
          }
          bitField0_ |= 0x00000020;
          return this;
        }
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeSymlink symlink = 6;</code>
         */
        public Builder mergeSymlink(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink value) {
          if (symlinkBuilder_ == null) {
            if (((bitField0_ & 0x00000020) == 0x00000020) &&
                symlink_ != org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink.getDefaultInstance()) {
              symlink_ =
                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink.newBuilder(symlink_).mergeFrom(value).buildPartial();
            } else {
              symlink_ = value;
            }
            onChanged();
          } else {
            symlinkBuilder_.mergeFrom(value);
          }
          bitField0_ |= 0x00000020;
          return this;
        }
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeSymlink symlink = 6;</code>
         */
        public Builder clearSymlink() {
          if (symlinkBuilder_ == null) {
            symlink_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink.getDefaultInstance();
            onChanged();
          } else {
            symlinkBuilder_.clear();
          }
          bitField0_ = (bitField0_ & ~0x00000020);
          return this;
        }
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeSymlink symlink = 6;</code>
         */
        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink.Builder getSymlinkBuilder() {
          bitField0_ |= 0x00000020;
          onChanged();
          return getSymlinkFieldBuilder().getBuilder();
        }
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeSymlink symlink = 6;</code>
         */
        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlinkOrBuilder getSymlinkOrBuilder() {
          if (symlinkBuilder_ != null) {
            return symlinkBuilder_.getMessageOrBuilder();
          } else {
            return symlink_;
          }
        }
        /**
         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeSymlink symlink = 6;</code>
         */
        private com.google.protobuf.SingleFieldBuilder<
            org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlinkOrBuilder> 
            getSymlinkFieldBuilder() {
          if (symlinkBuilder_ == null) {
            symlinkBuilder_ = new com.google.protobuf.SingleFieldBuilder<
                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlinkOrBuilder>(
                    symlink_,
                    getParentForChildren(),
                    isClean());
            symlink_ = null;
          }
          return symlinkBuilder_;
        }

        // @@protoc_insertion_point(builder_scope:hadoop.hdfs.fsimage.INodeSection.INode)
      }
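
      /*
       * Editor's note: a minimal, hypothetical usage sketch for this generated
       * builder API (the id and name values below are made up, not taken from a
       * real image):
       *
       *   FsImageProto.INodeSection.INode inode =
       *       FsImageProto.INodeSection.INode.newBuilder()
       *           .setType(FsImageProto.INodeSection.INode.Type.FILE)
       *           .setId(16385L)
       *           .setName(com.google.protobuf.ByteString.copyFromUtf8("foo"))
       *           .setFile(FsImageProto.INodeSection.INodeFile.getDefaultInstance())
       *           .build();
       *
       * build() enforces the required type and id fields via isInitialized();
       * by convention only the submessage matching type (file, directory, or
       * symlink) is populated.
       */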

      static {
        defaultInstance = new INode(true);
        defaultInstance.initFields();
      }

      // @@protoc_insertion_point(class_scope:hadoop.hdfs.fsimage.INodeSection.INode)
    }

    private int bitField0_;
    // optional uint64 lastInodeId = 1;
    public static final int LASTINODEID_FIELD_NUMBER = 1;
    private long lastInodeId_;
    /**
     * <code>optional uint64 lastInodeId = 1;</code>
     */
    public boolean hasLastInodeId() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>optional uint64 lastInodeId = 1;</code>
     */
    public long getLastInodeId() {
      return lastInodeId_;
    }

    // optional uint64 numInodes = 2;
    public static final int NUMINODES_FIELD_NUMBER = 2;
    private long numInodes_;
    /**
     * <code>optional uint64 numInodes = 2;</code>
     *
     * <pre>
     * The number of INode records that follow (the repeated INodes).
     * </pre>
     */
    public boolean hasNumInodes() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>optional uint64 numInodes = 2;</code>
     *
     * <pre>
     * The number of INode records that follow (the repeated INodes).
     * </pre>
     */
    public long getNumInodes() {
      return numInodes_;
    }

    private void initFields() {
      lastInodeId_ = 0L;
      numInodes_ = 0L;
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeUInt64(1, lastInodeId_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeUInt64(2, numInodes_);
      }
      getUnknownFields().writeTo(output);
    }

    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(1, lastInodeId_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(2, numInodes_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
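
    /*
     * Editor's note: a minimal loading sketch. It assumes the section payload is
     * a length-delimited INodeSection header followed by numInodes
     * length-delimited INode records, and that `in` is a hypothetical
     * InputStream positioned at the start of the (decompressed) section:
     *
     *   FsImageProto.INodeSection section =
     *       FsImageProto.INodeSection.parseDelimitedFrom(in);
     *   for (long i = 0; i < section.getNumInodes(); i++) {
     *     FsImageProto.INodeSection.INode inode =
     *         FsImageProto.INodeSection.INode.parseDelimitedFrom(in);
     *     // process inode ...
     *   }
     */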

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
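
    /*
     * Editor's note: a sketch of unpacking the 64-bit permission word described
     * in the Javadoc below. In big-endian bit order, [0:24) is the high 24 bits,
     * [24:48) the middle 24 bits, and [48:64) the low 16 bits; the two 24-bit
     * values are assumed to index the image's string table:
     *
     *   int userStrId  = (int) (permission >>> 40);
     *   int groupStrId = (int) ((permission >>> 16) & 0xFFFFFF);
     *   short mode     = (short) (permission & 0xFFFF);
     */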
    /**
     * Protobuf type {@code hadoop.hdfs.fsimage.INodeSection}
     *
     * <pre>
     **
     * Permission is serialized as a 64-bit long. [0:24):[24:48):[48:64) (in Big Endian).
     * The first and the second parts are the string ids of the user and
     * group name, and the last 16 bits are the permission bits.
     *
     * Name: INODE
     * </pre>
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSectionOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.Builder.class);
      }

      // Construct using org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        lastInodeId_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000001);
        numInodes_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000002);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeSection_descriptor;
      }

      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection getDefaultInstanceForType() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.getDefaultInstance();
      }

      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection build() {
        org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection buildPartial() {
        org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection result = new org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.lastInodeId_ = lastInodeId_;
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.numInodes_ = numInodes_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection) {
          return mergeFrom((org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection other) {
        if (other == org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.getDefaultInstance()) return this;
        if (other.hasLastInodeId()) {
          setLastInodeId(other.getLastInodeId());
        }
        if (other.hasNumInodes()) {
          setNumInodes(other.getNumInodes());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;

      // optional uint64 lastInodeId = 1;
      private long lastInodeId_ ;
      /**
       * <code>optional uint64 lastInodeId = 1;</code>
       */
      public boolean hasLastInodeId() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>optional uint64 lastInodeId = 1;</code>
       */
      public long getLastInodeId() {
        return lastInodeId_;
      }
      /**
       * <code>optional uint64 lastInodeId = 1;</code>
       */
      public Builder setLastInodeId(long value) {
        bitField0_ |= 0x00000001;
        lastInodeId_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional uint64 lastInodeId = 1;</code>
       */
      public Builder clearLastInodeId() {
        bitField0_ = (bitField0_ & ~0x00000001);
        lastInodeId_ = 0L;
        onChanged();
        return this;
      }

      // optional uint64 numInodes = 2;
      private long numInodes_ ;
      /**
       * <code>optional uint64 numInodes = 2;</code>
       *
       * <pre>
       * The number of INode records that follow (the repeated INodes).
       * </pre>
       */
      public boolean hasNumInodes() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>optional uint64 numInodes = 2;</code>
       *
       * <pre>
       * The number of INode records that follow (the repeated INodes).
       * </pre>
       */
      public long getNumInodes() {
        return numInodes_;
      }
      /**
       * <code>optional uint64 numInodes = 2;</code>
       *
       * <pre>
       * The number of INode records that follow (the repeated INodes).
       * </pre>
       */
      public Builder setNumInodes(long value) {
        bitField0_ |= 0x00000002;
        numInodes_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional uint64 numInodes = 2;</code>
       *
       * <pre>
       * The number of INode records that follow (the repeated INodes).
       * </pre>
       */
      public Builder clearNumInodes() {
        bitField0_ = (bitField0_ & ~0x00000002);
        numInodes_ = 0L;
        onChanged();
        return this;
      }

      // @@protoc_insertion_point(builder_scope:hadoop.hdfs.fsimage.INodeSection)
    }

    static {
      defaultInstance = new INodeSection(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:hadoop.hdfs.fsimage.INodeSection)
  }

  public interface FilesUnderConstructionSectionOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
  }
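
  /*
   * Editor's note: a minimal reading sketch. It assumes the section is stored as
   * a length-delimited (empty) FilesUnderConstructionSection header followed by
   * length-delimited FileUnderConstructionEntry records until the stream ends,
   * with `in` a hypothetical InputStream scoped to this section:
   *
   *   FsImageProto.FilesUnderConstructionSection header =
   *       FsImageProto.FilesUnderConstructionSection.parseDelimitedFrom(in);
   *   FsImageProto.FilesUnderConstructionSection.FileUnderConstructionEntry e;
   *   while ((e = FsImageProto.FilesUnderConstructionSection
   *       .FileUnderConstructionEntry.parseDelimitedFrom(in)) != null) {
   *     // e.getInodeId() and e.getFullPath() rebuild one lease entry ...
   *   }
   */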
  /**
   * Protobuf type {@code hadoop.hdfs.fsimage.FilesUnderConstructionSection}
   *
   * <pre>
   **
   * This section records information about under-construction files for
   * reconstructing the lease map.
   * NAME: FILES_UNDERCONSTRUCTION
   * </pre>
   */
  public static final class FilesUnderConstructionSection extends
      com.google.protobuf.GeneratedMessage
      implements FilesUnderConstructionSectionOrBuilder {
    // Use FilesUnderConstructionSection.newBuilder() to construct.
    private FilesUnderConstructionSection(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    private FilesUnderConstructionSection(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final FilesUnderConstructionSection defaultInstance;
    public static FilesUnderConstructionSection getDefaultInstance() {
      return defaultInstance;
    }

    public FilesUnderConstructionSection getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    private FilesUnderConstructionSection(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_FilesUnderConstructionSection_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_FilesUnderConstructionSection_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.Builder.class);
    }

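    // Editor's note: PARSER is the stream-level entry point for this message;
    // the static parseFrom/parseDelimitedFrom overloads delegate to it.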
    public static com.google.protobuf.Parser<FilesUnderConstructionSection> PARSER =
        new com.google.protobuf.AbstractParser<FilesUnderConstructionSection>() {
      public FilesUnderConstructionSection parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new FilesUnderConstructionSection(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<FilesUnderConstructionSection> getParserForType() {
      return PARSER;
    }

    public interface FileUnderConstructionEntryOrBuilder
        extends com.google.protobuf.MessageOrBuilder {

      // optional uint64 inodeId = 1;
      /**
       * <code>optional uint64 inodeId = 1;</code>
       */
      boolean hasInodeId();
      /**
       * <code>optional uint64 inodeId = 1;</code>
       */
      long getInodeId();

      // optional string fullPath = 2;
      /**
       * <code>optional string fullPath = 2;</code>
       */
      boolean hasFullPath();
      /**
       * <code>optional string fullPath = 2;</code>
       */
      java.lang.String getFullPath();
      /**
       * <code>optional string fullPath = 2;</code>
       */
      com.google.protobuf.ByteString
          getFullPathBytes();
    }
    /**
     * Protobuf type {@code hadoop.hdfs.fsimage.FilesUnderConstructionSection.FileUnderConstructionEntry}
     */
    public static final class FileUnderConstructionEntry extends
        com.google.protobuf.GeneratedMessage
        implements FileUnderConstructionEntryOrBuilder {
      // Use FileUnderConstructionEntry.newBuilder() to construct.
      private FileUnderConstructionEntry(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
        super(builder);
        this.unknownFields = builder.getUnknownFields();
      }
      private FileUnderConstructionEntry(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

      private static final FileUnderConstructionEntry defaultInstance;
      public static FileUnderConstructionEntry getDefaultInstance() {
        return defaultInstance;
      }

      public FileUnderConstructionEntry getDefaultInstanceForType() {
        return defaultInstance;
      }

      private final com.google.protobuf.UnknownFieldSet unknownFields;
      @java.lang.Override
      public final com.google.protobuf.UnknownFieldSet
          getUnknownFields() {
        return this.unknownFields;
      }
      private FileUnderConstructionEntry(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        initFields();
        int mutable_bitField0_ = 0;
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
            com.google.protobuf.UnknownFieldSet.newBuilder();
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              default: {
                if (!parseUnknownField(input, unknownFields,
                                       extensionRegistry, tag)) {
                  done = true;
                }
                break;
              }
              case 8: {
                bitField0_ |= 0x00000001;
                inodeId_ = input.readUInt64();
                break;
              }
              case 18: {
                bitField0_ |= 0x00000002;
                fullPath_ = input.readBytes();
                break;
              }
            }
          }
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(this);
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(
              e.getMessage()).setUnfinishedMessage(this);
        } finally {
          this.unknownFields = unknownFields.build();
          makeExtensionsImmutable();
        }
      }
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_FilesUnderConstructionSection_FileUnderConstructionEntry_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_FilesUnderConstructionSection_FileUnderConstructionEntry_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.FileUnderConstructionEntry.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.FileUnderConstructionEntry.Builder.class);
      }

      public static com.google.protobuf.Parser<FileUnderConstructionEntry> PARSER =
          new com.google.protobuf.AbstractParser<FileUnderConstructionEntry>() {
        public FileUnderConstructionEntry parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          return new FileUnderConstructionEntry(input, extensionRegistry);
        }
      };

      @java.lang.Override
      public com.google.protobuf.Parser<FileUnderConstructionEntry> getParserForType() {
        return PARSER;
      }

      private int bitField0_;
      // optional uint64 inodeId = 1;
      public static final int INODEID_FIELD_NUMBER = 1;
      private long inodeId_;
      /**
       * <code>optional uint64 inodeId = 1;</code>
       */
      public boolean hasInodeId() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>optional uint64 inodeId = 1;</code>
       */
      public long getInodeId() {
        return inodeId_;
      }

      // optional string fullPath = 2;
      public static final int FULLPATH_FIELD_NUMBER = 2;
      private java.lang.Object fullPath_;
      /**
       * <code>optional string fullPath = 2;</code>
       */
      public boolean hasFullPath() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>optional string fullPath = 2;</code>
       */
      public java.lang.String getFullPath() {
        java.lang.Object ref = fullPath_;
        if (ref instanceof java.lang.String) {
          return (java.lang.String) ref;
        } else {
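          // Lazy decode: the field holds the raw ByteString until first access;
          // the decoded String is cached back only if the bytes are valid UTF-8.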
10656          com.google.protobuf.ByteString bs = 
10657              (com.google.protobuf.ByteString) ref;
10658          java.lang.String s = bs.toStringUtf8();
10659          if (bs.isValidUtf8()) {
10660            fullPath_ = s;
10661          }
10662          return s;
10663        }
10664      }
10665      /**
10666       * <code>optional string fullPath = 2;</code>
10667       */
10668      public com.google.protobuf.ByteString
10669          getFullPathBytes() {
10670        java.lang.Object ref = fullPath_;
10671        if (ref instanceof java.lang.String) {
10672          com.google.protobuf.ByteString b = 
10673              com.google.protobuf.ByteString.copyFromUtf8(
10674                  (java.lang.String) ref);
10675          fullPath_ = b;
10676          return b;
10677        } else {
10678          return (com.google.protobuf.ByteString) ref;
10679        }
10680      }
10681
10682      private void initFields() {
10683        inodeId_ = 0L;
10684        fullPath_ = "";
10685      }
10686      private byte memoizedIsInitialized = -1;
10687      public final boolean isInitialized() {
10688        byte isInitialized = memoizedIsInitialized;
10689        if (isInitialized != -1) return isInitialized == 1;
10690
10691        memoizedIsInitialized = 1;
10692        return true;
10693      }
10694
10695      public void writeTo(com.google.protobuf.CodedOutputStream output)
10696                          throws java.io.IOException {
10697        getSerializedSize();
10698        if (((bitField0_ & 0x00000001) == 0x00000001)) {
10699          output.writeUInt64(1, inodeId_);
10700        }
10701        if (((bitField0_ & 0x00000002) == 0x00000002)) {
10702          output.writeBytes(2, getFullPathBytes());
10703        }
10704        getUnknownFields().writeTo(output);
10705      }
10706
10707      private int memoizedSerializedSize = -1;
10708      public int getSerializedSize() {
10709        int size = memoizedSerializedSize;
10710        if (size != -1) return size;
10711
10712        size = 0;
10713        if (((bitField0_ & 0x00000001) == 0x00000001)) {
10714          size += com.google.protobuf.CodedOutputStream
10715            .computeUInt64Size(1, inodeId_);
10716        }
10717        if (((bitField0_ & 0x00000002) == 0x00000002)) {
10718          size += com.google.protobuf.CodedOutputStream
10719            .computeBytesSize(2, getFullPathBytes());
10720        }
10721        size += getUnknownFields().getSerializedSize();
10722        memoizedSerializedSize = size;
10723        return size;
10724      }
10725
10726      private static final long serialVersionUID = 0L;
10727      @java.lang.Override
10728      protected java.lang.Object writeReplace()
10729          throws java.io.ObjectStreamException {
10730        return super.writeReplace();
10731      }
10732
10733      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.FileUnderConstructionEntry parseFrom(
10734          com.google.protobuf.ByteString data)
10735          throws com.google.protobuf.InvalidProtocolBufferException {
10736        return PARSER.parseFrom(data);
10737      }
10738      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.FileUnderConstructionEntry parseFrom(
10739          com.google.protobuf.ByteString data,
10740          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
10741          throws com.google.protobuf.InvalidProtocolBufferException {
10742        return PARSER.parseFrom(data, extensionRegistry);
10743      }
10744      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.FileUnderConstructionEntry parseFrom(byte[] data)
10745          throws com.google.protobuf.InvalidProtocolBufferException {
10746        return PARSER.parseFrom(data);
10747      }
10748      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.FileUnderConstructionEntry parseFrom(
10749          byte[] data,
10750          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
10751          throws com.google.protobuf.InvalidProtocolBufferException {
10752        return PARSER.parseFrom(data, extensionRegistry);
10753      }
10754      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.FileUnderConstructionEntry parseFrom(java.io.InputStream input)
10755          throws java.io.IOException {
10756        return PARSER.parseFrom(input);
10757      }
10758      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.FileUnderConstructionEntry parseFrom(
10759          java.io.InputStream input,
10760          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
10761          throws java.io.IOException {
10762        return PARSER.parseFrom(input, extensionRegistry);
10763      }
10764      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.FileUnderConstructionEntry parseDelimitedFrom(java.io.InputStream input)
10765          throws java.io.IOException {
10766        return PARSER.parseDelimitedFrom(input);
10767      }
10768      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.FileUnderConstructionEntry parseDelimitedFrom(
10769          java.io.InputStream input,
10770          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
10771          throws java.io.IOException {
10772        return PARSER.parseDelimitedFrom(input, extensionRegistry);
10773      }
10774      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.FileUnderConstructionEntry parseFrom(
10775          com.google.protobuf.CodedInputStream input)
10776          throws java.io.IOException {
10777        return PARSER.parseFrom(input);
10778      }
10779      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.FileUnderConstructionEntry parseFrom(
10780          com.google.protobuf.CodedInputStream input,
10781          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
10782          throws java.io.IOException {
10783        return PARSER.parseFrom(input, extensionRegistry);
10784      }
10785
10786      public static Builder newBuilder() { return Builder.create(); }
10787      public Builder newBuilderForType() { return newBuilder(); }
10788      public static Builder newBuilder(org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.FileUnderConstructionEntry prototype) {
10789        return newBuilder().mergeFrom(prototype);
10790      }
10791      public Builder toBuilder() { return newBuilder(this); }
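      // Editor's note: messages are immutable, so edits go through a Builder;
      // toBuilder() seeds a new builder with this message's current fields.
      // A sketch, where "entry" is an existing FileUnderConstructionEntry:
      //
      //   FileUnderConstructionEntry renamed = entry.toBuilder()
      //       .setFullPath("/tmp/new-name") // inodeId is carried over from "entry"
      //       .build();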
10792
10793      @java.lang.Override
10794      protected Builder newBuilderForType(
10795          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
10796        Builder builder = new Builder(parent);
10797        return builder;
10798      }
10799      /**
10800       * Protobuf type {@code hadoop.hdfs.fsimage.FilesUnderConstructionSection.FileUnderConstructionEntry}
10801       */
10802      public static final class Builder extends
10803          com.google.protobuf.GeneratedMessage.Builder<Builder>
10804         implements org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.FileUnderConstructionEntryOrBuilder {
10805        public static final com.google.protobuf.Descriptors.Descriptor
10806            getDescriptor() {
10807          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_FilesUnderConstructionSection_FileUnderConstructionEntry_descriptor;
10808        }
10809
10810        protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
10811            internalGetFieldAccessorTable() {
10812          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_FilesUnderConstructionSection_FileUnderConstructionEntry_fieldAccessorTable
10813              .ensureFieldAccessorsInitialized(
10814                  org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.FileUnderConstructionEntry.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.FileUnderConstructionEntry.Builder.class);
10815        }
10816
10817        // Construct using org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.FileUnderConstructionEntry.newBuilder()
10818        private Builder() {
10819          maybeForceBuilderInitialization();
10820        }
10821
10822        private Builder(
10823            com.google.protobuf.GeneratedMessage.BuilderParent parent) {
10824          super(parent);
10825          maybeForceBuilderInitialization();
10826        }
10827        private void maybeForceBuilderInitialization() {
10828          if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
10829          }
10830        }
10831        private static Builder create() {
10832          return new Builder();
10833        }
10834
10835        public Builder clear() {
10836          super.clear();
10837          inodeId_ = 0L;
10838          bitField0_ = (bitField0_ & ~0x00000001);
10839          fullPath_ = "";
10840          bitField0_ = (bitField0_ & ~0x00000002);
10841          return this;
10842        }
10843
10844        public Builder clone() {
10845          return create().mergeFrom(buildPartial());
10846        }
10847
10848        public com.google.protobuf.Descriptors.Descriptor
10849            getDescriptorForType() {
10850          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_FilesUnderConstructionSection_FileUnderConstructionEntry_descriptor;
10851        }
10852
10853        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.FileUnderConstructionEntry getDefaultInstanceForType() {
10854          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.FileUnderConstructionEntry.getDefaultInstance();
10855        }
10856
10857        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.FileUnderConstructionEntry build() {
10858          org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.FileUnderConstructionEntry result = buildPartial();
10859          if (!result.isInitialized()) {
10860            throw newUninitializedMessageException(result);
10861          }
10862          return result;
10863        }
10864
10865        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.FileUnderConstructionEntry buildPartial() {
10866          org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.FileUnderConstructionEntry result = new org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.FileUnderConstructionEntry(this);
10867          int from_bitField0_ = bitField0_;
10868          int to_bitField0_ = 0;
10869          if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
10870            to_bitField0_ |= 0x00000001;
10871          }
10872          result.inodeId_ = inodeId_;
10873          if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
10874            to_bitField0_ |= 0x00000002;
10875          }
10876          result.fullPath_ = fullPath_;
10877          result.bitField0_ = to_bitField0_;
10878          onBuilt();
10879          return result;
10880        }
10881
10882        public Builder mergeFrom(com.google.protobuf.Message other) {
10883          if (other instanceof org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.FileUnderConstructionEntry) {
10884            return mergeFrom((org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.FileUnderConstructionEntry)other);
10885          } else {
10886            super.mergeFrom(other);
10887            return this;
10888          }
10889        }
10890
10891        public Builder mergeFrom(org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.FileUnderConstructionEntry other) {
10892          if (other == org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.FileUnderConstructionEntry.getDefaultInstance()) return this;
10893          if (other.hasInodeId()) {
10894            setInodeId(other.getInodeId());
10895          }
10896          if (other.hasFullPath()) {
10897            bitField0_ |= 0x00000002;
10898            fullPath_ = other.fullPath_;
10899            onChanged();
10900          }
10901          this.mergeUnknownFields(other.getUnknownFields());
10902          return this;
10903        }
10904
10905        public final boolean isInitialized() {
10906          return true;
10907        }
10908
10909        public Builder mergeFrom(
10910            com.google.protobuf.CodedInputStream input,
10911            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
10912            throws java.io.IOException {
10913          org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.FileUnderConstructionEntry parsedMessage = null;
10914          try {
10915            parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
10916          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
10917            parsedMessage = (org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.FileUnderConstructionEntry) e.getUnfinishedMessage();
10918            throw e;
10919          } finally {
10920            if (parsedMessage != null) {
10921              mergeFrom(parsedMessage);
10922            }
10923          }
10924          return this;
10925        }
10926        private int bitField0_;
10927
10928        // optional uint64 inodeId = 1;
10929        private long inodeId_ ;
10930        /**
10931         * <code>optional uint64 inodeId = 1;</code>
10932         */
10933        public boolean hasInodeId() {
10934          return ((bitField0_ & 0x00000001) == 0x00000001);
10935        }
10936        /**
10937         * <code>optional uint64 inodeId = 1;</code>
10938         */
10939        public long getInodeId() {
10940          return inodeId_;
10941        }
10942        /**
10943         * <code>optional uint64 inodeId = 1;</code>
10944         */
10945        public Builder setInodeId(long value) {
10946          bitField0_ |= 0x00000001;
10947          inodeId_ = value;
10948          onChanged();
10949          return this;
10950        }
10951        /**
10952         * <code>optional uint64 inodeId = 1;</code>
10953         */
10954        public Builder clearInodeId() {
10955          bitField0_ = (bitField0_ & ~0x00000001);
10956          inodeId_ = 0L;
10957          onChanged();
10958          return this;
10959        }
10960
10961        // optional string fullPath = 2;
10962        private java.lang.Object fullPath_ = "";
10963        /**
10964         * <code>optional string fullPath = 2;</code>
10965         */
10966        public boolean hasFullPath() {
10967          return ((bitField0_ & 0x00000002) == 0x00000002);
10968        }
10969        /**
10970         * <code>optional string fullPath = 2;</code>
10971         */
10972        public java.lang.String getFullPath() {
10973          java.lang.Object ref = fullPath_;
10974          if (!(ref instanceof java.lang.String)) {
10975            java.lang.String s = ((com.google.protobuf.ByteString) ref)
10976                .toStringUtf8();
10977            fullPath_ = s;
10978            return s;
10979          } else {
10980            return (java.lang.String) ref;
10981          }
10982        }
10983        /**
10984         * <code>optional string fullPath = 2;</code>
10985         */
10986        public com.google.protobuf.ByteString
10987            getFullPathBytes() {
10988          java.lang.Object ref = fullPath_;
          if (ref instanceof java.lang.String) {
10990            com.google.protobuf.ByteString b = 
10991                com.google.protobuf.ByteString.copyFromUtf8(
10992                    (java.lang.String) ref);
10993            fullPath_ = b;
10994            return b;
10995          } else {
10996            return (com.google.protobuf.ByteString) ref;
10997          }
10998        }
10999        /**
11000         * <code>optional string fullPath = 2;</code>
11001         */
11002        public Builder setFullPath(
11003            java.lang.String value) {
11004          if (value == null) {
            throw new NullPointerException();
          }
          bitField0_ |= 0x00000002;
11008          fullPath_ = value;
11009          onChanged();
11010          return this;
11011        }
11012        /**
11013         * <code>optional string fullPath = 2;</code>
11014         */
11015        public Builder clearFullPath() {
11016          bitField0_ = (bitField0_ & ~0x00000002);
11017          fullPath_ = getDefaultInstance().getFullPath();
11018          onChanged();
11019          return this;
11020        }
11021        /**
11022         * <code>optional string fullPath = 2;</code>
11023         */
11024        public Builder setFullPathBytes(
11025            com.google.protobuf.ByteString value) {
11026          if (value == null) {
            throw new NullPointerException();
          }
          bitField0_ |= 0x00000002;
11030          fullPath_ = value;
11031          onChanged();
11032          return this;
11033        }
11034
11035        // @@protoc_insertion_point(builder_scope:hadoop.hdfs.fsimage.FilesUnderConstructionSection.FileUnderConstructionEntry)
11036      }
11037
11038      static {
11039        defaultInstance = new FileUnderConstructionEntry(true);
11040        defaultInstance.initFields();
11041      }
11042
11043      // @@protoc_insertion_point(class_scope:hadoop.hdfs.fsimage.FilesUnderConstructionSection.FileUnderConstructionEntry)
11044    }
11045
11046    private void initFields() {
11047    }
11048    private byte memoizedIsInitialized = -1;
11049    public final boolean isInitialized() {
11050      byte isInitialized = memoizedIsInitialized;
11051      if (isInitialized != -1) return isInitialized == 1;
11052
11053      memoizedIsInitialized = 1;
11054      return true;
11055    }
11056
11057    public void writeTo(com.google.protobuf.CodedOutputStream output)
11058                        throws java.io.IOException {
11059      getSerializedSize();
11060      getUnknownFields().writeTo(output);
11061    }
11062
11063    private int memoizedSerializedSize = -1;
11064    public int getSerializedSize() {
11065      int size = memoizedSerializedSize;
11066      if (size != -1) return size;
11067
11068      size = 0;
11069      size += getUnknownFields().getSerializedSize();
11070      memoizedSerializedSize = size;
11071      return size;
11072    }
11073
11074    private static final long serialVersionUID = 0L;
11075    @java.lang.Override
11076    protected java.lang.Object writeReplace()
11077        throws java.io.ObjectStreamException {
11078      return super.writeReplace();
11079    }
11080
11081    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection parseFrom(
11082        com.google.protobuf.ByteString data)
11083        throws com.google.protobuf.InvalidProtocolBufferException {
11084      return PARSER.parseFrom(data);
11085    }
11086    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection parseFrom(
11087        com.google.protobuf.ByteString data,
11088        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
11089        throws com.google.protobuf.InvalidProtocolBufferException {
11090      return PARSER.parseFrom(data, extensionRegistry);
11091    }
11092    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection parseFrom(byte[] data)
11093        throws com.google.protobuf.InvalidProtocolBufferException {
11094      return PARSER.parseFrom(data);
11095    }
11096    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection parseFrom(
11097        byte[] data,
11098        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
11099        throws com.google.protobuf.InvalidProtocolBufferException {
11100      return PARSER.parseFrom(data, extensionRegistry);
11101    }
11102    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection parseFrom(java.io.InputStream input)
11103        throws java.io.IOException {
11104      return PARSER.parseFrom(input);
11105    }
11106    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection parseFrom(
11107        java.io.InputStream input,
11108        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
11109        throws java.io.IOException {
11110      return PARSER.parseFrom(input, extensionRegistry);
11111    }
11112    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection parseDelimitedFrom(java.io.InputStream input)
11113        throws java.io.IOException {
11114      return PARSER.parseDelimitedFrom(input);
11115    }
11116    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection parseDelimitedFrom(
11117        java.io.InputStream input,
11118        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
11119        throws java.io.IOException {
11120      return PARSER.parseDelimitedFrom(input, extensionRegistry);
11121    }
11122    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection parseFrom(
11123        com.google.protobuf.CodedInputStream input)
11124        throws java.io.IOException {
11125      return PARSER.parseFrom(input);
11126    }
11127    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection parseFrom(
11128        com.google.protobuf.CodedInputStream input,
11129        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
11130        throws java.io.IOException {
11131      return PARSER.parseFrom(input, extensionRegistry);
11132    }
11133
11134    public static Builder newBuilder() { return Builder.create(); }
11135    public Builder newBuilderForType() { return newBuilder(); }
11136    public static Builder newBuilder(org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection prototype) {
11137      return newBuilder().mergeFrom(prototype);
11138    }
11139    public Builder toBuilder() { return newBuilder(this); }
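    // Editor's note: the section message itself declares no fields; the
    // FileUnderConstructionEntry records are the nested type above. How the
    // entries are laid out next to the section header in an actual fsimage
    // file is decided by the image writer, not by this generated class, so a
    // sketch here is limited to the empty container:
    //
    //   FilesUnderConstructionSection section =
    //       FilesUnderConstructionSection.newBuilder().build(); // no fields to set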
11140
11141    @java.lang.Override
11142    protected Builder newBuilderForType(
11143        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
11144      Builder builder = new Builder(parent);
11145      return builder;
11146    }
11147    /**
11148     * Protobuf type {@code hadoop.hdfs.fsimage.FilesUnderConstructionSection}
11149     *
11150     * <pre>
11151     **
11152     * This section records information about under-construction files for
11153     * reconstructing the lease map.
11154     * NAME: FILES_UNDERCONSTRUCTION
11155     * </pre>
11156     */
11157    public static final class Builder extends
11158        com.google.protobuf.GeneratedMessage.Builder<Builder>
11159       implements org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSectionOrBuilder {
11160      public static final com.google.protobuf.Descriptors.Descriptor
11161          getDescriptor() {
11162        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_FilesUnderConstructionSection_descriptor;
11163      }
11164
11165      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
11166          internalGetFieldAccessorTable() {
11167        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_FilesUnderConstructionSection_fieldAccessorTable
11168            .ensureFieldAccessorsInitialized(
11169                org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.Builder.class);
11170      }
11171
11172      // Construct using org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.newBuilder()
11173      private Builder() {
11174        maybeForceBuilderInitialization();
11175      }
11176
11177      private Builder(
11178          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
11179        super(parent);
11180        maybeForceBuilderInitialization();
11181      }
11182      private void maybeForceBuilderInitialization() {
11183        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
11184        }
11185      }
11186      private static Builder create() {
11187        return new Builder();
11188      }
11189
11190      public Builder clear() {
11191        super.clear();
11192        return this;
11193      }
11194
11195      public Builder clone() {
11196        return create().mergeFrom(buildPartial());
11197      }
11198
11199      public com.google.protobuf.Descriptors.Descriptor
11200          getDescriptorForType() {
11201        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_FilesUnderConstructionSection_descriptor;
11202      }
11203
11204      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection getDefaultInstanceForType() {
11205        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.getDefaultInstance();
11206      }
11207
11208      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection build() {
11209        org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection result = buildPartial();
11210        if (!result.isInitialized()) {
11211          throw newUninitializedMessageException(result);
11212        }
11213        return result;
11214      }
11215
11216      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection buildPartial() {
11217        org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection result = new org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection(this);
11218        onBuilt();
11219        return result;
11220      }
11221
11222      public Builder mergeFrom(com.google.protobuf.Message other) {
11223        if (other instanceof org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection) {
11224          return mergeFrom((org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection)other);
11225        } else {
11226          super.mergeFrom(other);
11227          return this;
11228        }
11229      }
11230
11231      public Builder mergeFrom(org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection other) {
11232        if (other == org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.getDefaultInstance()) return this;
11233        this.mergeUnknownFields(other.getUnknownFields());
11234        return this;
11235      }
11236
11237      public final boolean isInitialized() {
11238        return true;
11239      }
11240
11241      public Builder mergeFrom(
11242          com.google.protobuf.CodedInputStream input,
11243          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
11244          throws java.io.IOException {
11245        org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection parsedMessage = null;
11246        try {
11247          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
11248        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
11249          parsedMessage = (org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection) e.getUnfinishedMessage();
11250          throw e;
11251        } finally {
11252          if (parsedMessage != null) {
11253            mergeFrom(parsedMessage);
11254          }
11255        }
11256        return this;
11257      }
11258
11259      // @@protoc_insertion_point(builder_scope:hadoop.hdfs.fsimage.FilesUnderConstructionSection)
11260    }
11261
11262    static {
11263      defaultInstance = new FilesUnderConstructionSection(true);
11264      defaultInstance.initFields();
11265    }
11266
11267    // @@protoc_insertion_point(class_scope:hadoop.hdfs.fsimage.FilesUnderConstructionSection)
11268  }
11269
11270  public interface INodeDirectorySectionOrBuilder
11271      extends com.google.protobuf.MessageOrBuilder {
11272  }
11273  /**
11274   * Protobuf type {@code hadoop.hdfs.fsimage.INodeDirectorySection}
11275   *
11276   * <pre>
11277   **
   * This section records the children of each directory.
11279   * NAME: INODE_DIR
11280   * </pre>
11281   */
11282  public static final class INodeDirectorySection extends
11283      com.google.protobuf.GeneratedMessage
11284      implements INodeDirectorySectionOrBuilder {
11285    // Use INodeDirectorySection.newBuilder() to construct.
11286    private INodeDirectorySection(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
11287      super(builder);
11288      this.unknownFields = builder.getUnknownFields();
11289    }
11290    private INodeDirectorySection(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
11291
11292    private static final INodeDirectorySection defaultInstance;
11293    public static INodeDirectorySection getDefaultInstance() {
11294      return defaultInstance;
11295    }
11296
11297    public INodeDirectorySection getDefaultInstanceForType() {
11298      return defaultInstance;
11299    }
11300
11301    private final com.google.protobuf.UnknownFieldSet unknownFields;
11302    @java.lang.Override
11303    public final com.google.protobuf.UnknownFieldSet
11304        getUnknownFields() {
11305      return this.unknownFields;
11306    }
11307    private INodeDirectorySection(
11308        com.google.protobuf.CodedInputStream input,
11309        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
11310        throws com.google.protobuf.InvalidProtocolBufferException {
11311      initFields();
11312      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
11313          com.google.protobuf.UnknownFieldSet.newBuilder();
11314      try {
11315        boolean done = false;
11316        while (!done) {
11317          int tag = input.readTag();
11318          switch (tag) {
11319            case 0:
11320              done = true;
11321              break;
11322            default: {
11323              if (!parseUnknownField(input, unknownFields,
11324                                     extensionRegistry, tag)) {
11325                done = true;
11326              }
11327              break;
11328            }
11329          }
11330        }
11331      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
11332        throw e.setUnfinishedMessage(this);
11333      } catch (java.io.IOException e) {
11334        throw new com.google.protobuf.InvalidProtocolBufferException(
11335            e.getMessage()).setUnfinishedMessage(this);
11336      } finally {
11337        this.unknownFields = unknownFields.build();
11338        makeExtensionsImmutable();
11339      }
11340    }
11341    public static final com.google.protobuf.Descriptors.Descriptor
11342        getDescriptor() {
11343      return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeDirectorySection_descriptor;
11344    }
11345
11346    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
11347        internalGetFieldAccessorTable() {
11348      return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeDirectorySection_fieldAccessorTable
11349          .ensureFieldAccessorsInitialized(
11350              org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.Builder.class);
11351    }
11352
11353    public static com.google.protobuf.Parser<INodeDirectorySection> PARSER =
11354        new com.google.protobuf.AbstractParser<INodeDirectorySection>() {
11355      public INodeDirectorySection parsePartialFrom(
11356          com.google.protobuf.CodedInputStream input,
11357          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
11358          throws com.google.protobuf.InvalidProtocolBufferException {
11359        return new INodeDirectorySection(input, extensionRegistry);
11360      }
11361    };
11362
11363    @java.lang.Override
11364    public com.google.protobuf.Parser<INodeDirectorySection> getParserForType() {
11365      return PARSER;
11366    }
11367
11368    public interface DirEntryOrBuilder
11369        extends com.google.protobuf.MessageOrBuilder {
11370
11371      // optional uint64 parent = 1;
11372      /**
11373       * <code>optional uint64 parent = 1;</code>
11374       */
11375      boolean hasParent();
11376      /**
11377       * <code>optional uint64 parent = 1;</code>
11378       */
11379      long getParent();
11380
11381      // repeated uint64 children = 2 [packed = true];
11382      /**
11383       * <code>repeated uint64 children = 2 [packed = true];</code>
11384       *
11385       * <pre>
11386       * children that are not reference nodes
11387       * </pre>
11388       */
11389      java.util.List<java.lang.Long> getChildrenList();
11390      /**
11391       * <code>repeated uint64 children = 2 [packed = true];</code>
11392       *
11393       * <pre>
11394       * children that are not reference nodes
11395       * </pre>
11396       */
11397      int getChildrenCount();
11398      /**
11399       * <code>repeated uint64 children = 2 [packed = true];</code>
11400       *
11401       * <pre>
11402       * children that are not reference nodes
11403       * </pre>
11404       */
11405      long getChildren(int index);
11406
11407      // repeated uint32 refChildren = 3 [packed = true];
11408      /**
11409       * <code>repeated uint32 refChildren = 3 [packed = true];</code>
11410       *
11411       * <pre>
       * children that are reference nodes; each element is a reference node id
11413       * </pre>
11414       */
11415      java.util.List<java.lang.Integer> getRefChildrenList();
11416      /**
11417       * <code>repeated uint32 refChildren = 3 [packed = true];</code>
11418       *
11419       * <pre>
       * children that are reference nodes; each element is a reference node id
11421       * </pre>
11422       */
11423      int getRefChildrenCount();
11424      /**
11425       * <code>repeated uint32 refChildren = 3 [packed = true];</code>
11426       *
11427       * <pre>
       * children that are reference nodes; each element is a reference node id
11429       * </pre>
11430       */
11431      int getRefChildren(int index);
11432    }
11433    /**
11434     * Protobuf type {@code hadoop.hdfs.fsimage.INodeDirectorySection.DirEntry}
11435     *
11436     * <pre>
11437     **
11438     * A single DirEntry needs to fit in the default PB max message size of
11439     * 64MB. Please be careful when adding more fields to a DirEntry!
11440     * </pre>
11441     */
11442    public static final class DirEntry extends
11443        com.google.protobuf.GeneratedMessage
11444        implements DirEntryOrBuilder {
11445      // Use DirEntry.newBuilder() to construct.
11446      private DirEntry(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
11447        super(builder);
11448        this.unknownFields = builder.getUnknownFields();
11449      }
11450      private DirEntry(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
11451
11452      private static final DirEntry defaultInstance;
11453      public static DirEntry getDefaultInstance() {
11454        return defaultInstance;
11455      }
11456
11457      public DirEntry getDefaultInstanceForType() {
11458        return defaultInstance;
11459      }
11460
11461      private final com.google.protobuf.UnknownFieldSet unknownFields;
11462      @java.lang.Override
11463      public final com.google.protobuf.UnknownFieldSet
11464          getUnknownFields() {
11465        return this.unknownFields;
11466      }
11467      private DirEntry(
11468          com.google.protobuf.CodedInputStream input,
11469          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
11470          throws com.google.protobuf.InvalidProtocolBufferException {
11471        initFields();
11472        int mutable_bitField0_ = 0;
11473        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
11474            com.google.protobuf.UnknownFieldSet.newBuilder();
11475        try {
11476          boolean done = false;
11477          while (!done) {
11478            int tag = input.readTag();
11479            switch (tag) {
11480              case 0:
11481                done = true;
11482                break;
11483              default: {
11484                if (!parseUnknownField(input, unknownFields,
11485                                       extensionRegistry, tag)) {
11486                  done = true;
11487                }
11488                break;
11489              }
11490              case 8: {
11491                bitField0_ |= 0x00000001;
11492                parent_ = input.readUInt64();
11493                break;
11494              }
11495              case 16: {
11496                if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
11497                  children_ = new java.util.ArrayList<java.lang.Long>();
11498                  mutable_bitField0_ |= 0x00000002;
11499                }
11500                children_.add(input.readUInt64());
11501                break;
11502              }
11503              case 18: {
11504                int length = input.readRawVarint32();
11505                int limit = input.pushLimit(length);
11506                if (!((mutable_bitField0_ & 0x00000002) == 0x00000002) && input.getBytesUntilLimit() > 0) {
11507                  children_ = new java.util.ArrayList<java.lang.Long>();
11508                  mutable_bitField0_ |= 0x00000002;
11509                }
11510                while (input.getBytesUntilLimit() > 0) {
11511                  children_.add(input.readUInt64());
11512                }
11513                input.popLimit(limit);
11514                break;
11515              }
11516              case 24: {
11517                if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) {
11518                  refChildren_ = new java.util.ArrayList<java.lang.Integer>();
11519                  mutable_bitField0_ |= 0x00000004;
11520                }
11521                refChildren_.add(input.readUInt32());
11522                break;
11523              }
11524              case 26: {
11525                int length = input.readRawVarint32();
11526                int limit = input.pushLimit(length);
11527                if (!((mutable_bitField0_ & 0x00000004) == 0x00000004) && input.getBytesUntilLimit() > 0) {
11528                  refChildren_ = new java.util.ArrayList<java.lang.Integer>();
11529                  mutable_bitField0_ |= 0x00000004;
11530                }
11531                while (input.getBytesUntilLimit() > 0) {
11532                  refChildren_.add(input.readUInt32());
11533                }
11534                input.popLimit(limit);
11535                break;
11536              }
11537            }
11538          }
11539        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
11540          throw e.setUnfinishedMessage(this);
11541        } catch (java.io.IOException e) {
11542          throw new com.google.protobuf.InvalidProtocolBufferException(
11543              e.getMessage()).setUnfinishedMessage(this);
11544        } finally {
11545          if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
11546            children_ = java.util.Collections.unmodifiableList(children_);
11547          }
11548          if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) {
11549            refChildren_ = java.util.Collections.unmodifiableList(refChildren_);
11550          }
11551          this.unknownFields = unknownFields.build();
11552          makeExtensionsImmutable();
11553        }
11554      }
11555      public static final com.google.protobuf.Descriptors.Descriptor
11556          getDescriptor() {
11557        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeDirectorySection_DirEntry_descriptor;
11558      }
11559
11560      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
11561          internalGetFieldAccessorTable() {
11562        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeDirectorySection_DirEntry_fieldAccessorTable
11563            .ensureFieldAccessorsInitialized(
11564                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.DirEntry.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.DirEntry.Builder.class);
11565      }
11566
11567      public static com.google.protobuf.Parser<DirEntry> PARSER =
11568          new com.google.protobuf.AbstractParser<DirEntry>() {
11569        public DirEntry parsePartialFrom(
11570            com.google.protobuf.CodedInputStream input,
11571            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
11572            throws com.google.protobuf.InvalidProtocolBufferException {
11573          return new DirEntry(input, extensionRegistry);
11574        }
11575      };
11576
11577      @java.lang.Override
11578      public com.google.protobuf.Parser<DirEntry> getParserForType() {
11579        return PARSER;
11580      }
11581
11582      private int bitField0_;
11583      // optional uint64 parent = 1;
11584      public static final int PARENT_FIELD_NUMBER = 1;
11585      private long parent_;
11586      /**
11587       * <code>optional uint64 parent = 1;</code>
11588       */
11589      public boolean hasParent() {
11590        return ((bitField0_ & 0x00000001) == 0x00000001);
11591      }
11592      /**
11593       * <code>optional uint64 parent = 1;</code>
11594       */
11595      public long getParent() {
11596        return parent_;
11597      }
11598
11599      // repeated uint64 children = 2 [packed = true];
11600      public static final int CHILDREN_FIELD_NUMBER = 2;
11601      private java.util.List<java.lang.Long> children_;
11602      /**
11603       * <code>repeated uint64 children = 2 [packed = true];</code>
11604       *
11605       * <pre>
11606       * children that are not reference nodes
11607       * </pre>
11608       */
11609      public java.util.List<java.lang.Long>
11610          getChildrenList() {
11611        return children_;
11612      }
11613      /**
11614       * <code>repeated uint64 children = 2 [packed = true];</code>
11615       *
11616       * <pre>
11617       * children that are not reference nodes
11618       * </pre>
11619       */
11620      public int getChildrenCount() {
11621        return children_.size();
11622      }
11623      /**
11624       * <code>repeated uint64 children = 2 [packed = true];</code>
11625       *
11626       * <pre>
11627       * children that are not reference nodes
11628       * </pre>
11629       */
11630      public long getChildren(int index) {
11631        return children_.get(index);
11632      }
11633      private int childrenMemoizedSerializedSize = -1;
11634
11635      // repeated uint32 refChildren = 3 [packed = true];
11636      public static final int REFCHILDREN_FIELD_NUMBER = 3;
11637      private java.util.List<java.lang.Integer> refChildren_;
11638      /**
11639       * <code>repeated uint32 refChildren = 3 [packed = true];</code>
11640       *
11641       * <pre>
       * children that are reference nodes; each element is a reference node id
11643       * </pre>
11644       */
11645      public java.util.List<java.lang.Integer>
11646          getRefChildrenList() {
11647        return refChildren_;
11648      }
11649      /**
11650       * <code>repeated uint32 refChildren = 3 [packed = true];</code>
11651       *
11652       * <pre>
       * children that are reference nodes; each element is a reference node id
11654       * </pre>
11655       */
11656      public int getRefChildrenCount() {
11657        return refChildren_.size();
11658      }
11659      /**
11660       * <code>repeated uint32 refChildren = 3 [packed = true];</code>
11661       *
11662       * <pre>
11663       * children that are reference nodes, each element is a reference node id
11664       * </pre>
11665       */
11666      public int getRefChildren(int index) {
11667        return refChildren_.get(index);
11668      }
11669      private int refChildrenMemoizedSerializedSize = -1;
11670
11671      private void initFields() {
11672        parent_ = 0L;
11673        children_ = java.util.Collections.emptyList();
11674        refChildren_ = java.util.Collections.emptyList();
11675      }
11676      private byte memoizedIsInitialized = -1;
11677      public final boolean isInitialized() {
11678        byte isInitialized = memoizedIsInitialized;
11679        if (isInitialized != -1) return isInitialized == 1;
11680
11681        memoizedIsInitialized = 1;
11682        return true;
11683      }
11684
11685      public void writeTo(com.google.protobuf.CodedOutputStream output)
11686                          throws java.io.IOException {
11687        getSerializedSize();
11688        if (((bitField0_ & 0x00000001) == 0x00000001)) {
11689          output.writeUInt64(1, parent_);
11690        }
11691        if (getChildrenList().size() > 0) {
11692          output.writeRawVarint32(18);
11693          output.writeRawVarint32(childrenMemoizedSerializedSize);
11694        }
11695        for (int i = 0; i < children_.size(); i++) {
11696          output.writeUInt64NoTag(children_.get(i));
11697        }
11698        if (getRefChildrenList().size() > 0) {
11699          output.writeRawVarint32(26);
11700          output.writeRawVarint32(refChildrenMemoizedSerializedSize);
11701        }
11702        for (int i = 0; i < refChildren_.size(); i++) {
11703          output.writeUInt32NoTag(refChildren_.get(i));
11704        }
11705        getUnknownFields().writeTo(output);
11706      }
11707
11708      private int memoizedSerializedSize = -1;
11709      public int getSerializedSize() {
11710        int size = memoizedSerializedSize;
11711        if (size != -1) return size;
11712
11713        size = 0;
11714        if (((bitField0_ & 0x00000001) == 0x00000001)) {
11715          size += com.google.protobuf.CodedOutputStream
11716            .computeUInt64Size(1, parent_);
11717        }
11718        {
11719          int dataSize = 0;
11720          for (int i = 0; i < children_.size(); i++) {
11721            dataSize += com.google.protobuf.CodedOutputStream
11722              .computeUInt64SizeNoTag(children_.get(i));
11723          }
11724          size += dataSize;
11725          if (!getChildrenList().isEmpty()) {
11726            size += 1;
11727            size += com.google.protobuf.CodedOutputStream
11728                .computeInt32SizeNoTag(dataSize);
11729          }
11730          childrenMemoizedSerializedSize = dataSize;
11731        }
11732        {
11733          int dataSize = 0;
11734          for (int i = 0; i < refChildren_.size(); i++) {
11735            dataSize += com.google.protobuf.CodedOutputStream
11736              .computeUInt32SizeNoTag(refChildren_.get(i));
11737          }
11738          size += dataSize;
11739          if (!getRefChildrenList().isEmpty()) {
11740            size += 1;
11741            size += com.google.protobuf.CodedOutputStream
11742                .computeInt32SizeNoTag(dataSize);
11743          }
11744          refChildrenMemoizedSerializedSize = dataSize;
11745        }
11746        size += getUnknownFields().getSerializedSize();
11747        memoizedSerializedSize = size;
11748        return size;
11749      }
11750
11751      private static final long serialVersionUID = 0L;
11752      @java.lang.Override
11753      protected java.lang.Object writeReplace()
11754          throws java.io.ObjectStreamException {
11755        return super.writeReplace();
11756      }
11757
11758      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.DirEntry parseFrom(
11759          com.google.protobuf.ByteString data)
11760          throws com.google.protobuf.InvalidProtocolBufferException {
11761        return PARSER.parseFrom(data);
11762      }
11763      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.DirEntry parseFrom(
11764          com.google.protobuf.ByteString data,
11765          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
11766          throws com.google.protobuf.InvalidProtocolBufferException {
11767        return PARSER.parseFrom(data, extensionRegistry);
11768      }
11769      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.DirEntry parseFrom(byte[] data)
11770          throws com.google.protobuf.InvalidProtocolBufferException {
11771        return PARSER.parseFrom(data);
11772      }
11773      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.DirEntry parseFrom(
11774          byte[] data,
11775          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
11776          throws com.google.protobuf.InvalidProtocolBufferException {
11777        return PARSER.parseFrom(data, extensionRegistry);
11778      }
11779      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.DirEntry parseFrom(java.io.InputStream input)
11780          throws java.io.IOException {
11781        return PARSER.parseFrom(input);
11782      }
11783      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.DirEntry parseFrom(
11784          java.io.InputStream input,
11785          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
11786          throws java.io.IOException {
11787        return PARSER.parseFrom(input, extensionRegistry);
11788      }
11789      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.DirEntry parseDelimitedFrom(java.io.InputStream input)
11790          throws java.io.IOException {
11791        return PARSER.parseDelimitedFrom(input);
11792      }
11793      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.DirEntry parseDelimitedFrom(
11794          java.io.InputStream input,
11795          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
11796          throws java.io.IOException {
11797        return PARSER.parseDelimitedFrom(input, extensionRegistry);
11798      }
11799      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.DirEntry parseFrom(
11800          com.google.protobuf.CodedInputStream input)
11801          throws java.io.IOException {
11802        return PARSER.parseFrom(input);
11803      }
11804      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.DirEntry parseFrom(
11805          com.google.protobuf.CodedInputStream input,
11806          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
11807          throws java.io.IOException {
11808        return PARSER.parseFrom(input, extensionRegistry);
11809      }
11810
11811      public static Builder newBuilder() { return Builder.create(); }
11812      public Builder newBuilderForType() { return newBuilder(); }
11813      public static Builder newBuilder(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.DirEntry prototype) {
11814        return newBuilder().mergeFrom(prototype);
11815      }
11816      public Builder toBuilder() { return newBuilder(this); }
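      // Editor's note: a minimal construction sketch; the id values are
      // illustrative only. addChildren()/addRefChildren() are the generated
      // repeated-field accessors on the Builder.
      //
      //   DirEntry dir = DirEntry.newBuilder()
      //       .setParent(16385L)    // inode id of the directory
      //       .addChildren(16386L)  // plain child inode ids
      //       .addChildren(16387L)
      //       .addRefChildren(7)    // reference-node ids
      //       .build();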
11817
11818      @java.lang.Override
11819      protected Builder newBuilderForType(
11820          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
11821        Builder builder = new Builder(parent);
11822        return builder;
11823      }
11824      /**
11825       * Protobuf type {@code hadoop.hdfs.fsimage.INodeDirectorySection.DirEntry}
11826       *
11827       * <pre>
11828       **
11829       * A single DirEntry needs to fit in the default PB max message size of
11830       * 64MB. Please be careful when adding more fields to a DirEntry!
11831       * </pre>
11832       */
11833      public static final class Builder extends
11834          com.google.protobuf.GeneratedMessage.Builder<Builder>
11835         implements org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.DirEntryOrBuilder {
11836        public static final com.google.protobuf.Descriptors.Descriptor
11837            getDescriptor() {
11838          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeDirectorySection_DirEntry_descriptor;
11839        }
11840
11841        protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
11842            internalGetFieldAccessorTable() {
11843          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeDirectorySection_DirEntry_fieldAccessorTable
11844              .ensureFieldAccessorsInitialized(
11845                  org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.DirEntry.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.DirEntry.Builder.class);
11846        }
11847
11848        // Construct using org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.DirEntry.newBuilder()
11849        private Builder() {
11850          maybeForceBuilderInitialization();
11851        }
11852
11853        private Builder(
11854            com.google.protobuf.GeneratedMessage.BuilderParent parent) {
11855          super(parent);
11856          maybeForceBuilderInitialization();
11857        }
11858        private void maybeForceBuilderInitialization() {
11859          if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
11860          }
11861        }
11862        private static Builder create() {
11863          return new Builder();
11864        }
11865
11866        public Builder clear() {
11867          super.clear();
11868          parent_ = 0L;
11869          bitField0_ = (bitField0_ & ~0x00000001);
11870          children_ = java.util.Collections.emptyList();
11871          bitField0_ = (bitField0_ & ~0x00000002);
11872          refChildren_ = java.util.Collections.emptyList();
11873          bitField0_ = (bitField0_ & ~0x00000004);
11874          return this;
11875        }
11876
11877        public Builder clone() {
11878          return create().mergeFrom(buildPartial());
11879        }
11880
11881        public com.google.protobuf.Descriptors.Descriptor
11882            getDescriptorForType() {
11883          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeDirectorySection_DirEntry_descriptor;
11884        }
11885
11886        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.DirEntry getDefaultInstanceForType() {
11887          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.DirEntry.getDefaultInstance();
11888        }
11889
11890        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.DirEntry build() {
11891          org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.DirEntry result = buildPartial();
11892          if (!result.isInitialized()) {
11893            throw newUninitializedMessageException(result);
11894          }
11895          return result;
11896        }
11897
11898        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.DirEntry buildPartial() {
11899          org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.DirEntry result = new org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.DirEntry(this);
11900          int from_bitField0_ = bitField0_;
11901          int to_bitField0_ = 0;
11902          if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
11903            to_bitField0_ |= 0x00000001;
11904          }
11905          result.parent_ = parent_;
11906          if (((bitField0_ & 0x00000002) == 0x00000002)) {
11907            children_ = java.util.Collections.unmodifiableList(children_);
11908            bitField0_ = (bitField0_ & ~0x00000002);
11909          }
11910          result.children_ = children_;
11911          if (((bitField0_ & 0x00000004) == 0x00000004)) {
11912            refChildren_ = java.util.Collections.unmodifiableList(refChildren_);
11913            bitField0_ = (bitField0_ & ~0x00000004);
11914          }
11915          result.refChildren_ = refChildren_;
11916          result.bitField0_ = to_bitField0_;
11917          onBuilt();
11918          return result;
11919        }
11920
11921        public Builder mergeFrom(com.google.protobuf.Message other) {
11922          if (other instanceof org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.DirEntry) {
11923            return mergeFrom((org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.DirEntry)other);
11924          } else {
11925            super.mergeFrom(other);
11926            return this;
11927          }
11928        }
11929
11930        public Builder mergeFrom(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.DirEntry other) {
11931          if (other == org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.DirEntry.getDefaultInstance()) return this;
11932          if (other.hasParent()) {
11933            setParent(other.getParent());
11934          }
11935          if (!other.children_.isEmpty()) {
11936            if (children_.isEmpty()) {
11937              children_ = other.children_;
11938              bitField0_ = (bitField0_ & ~0x00000002);
11939            } else {
11940              ensureChildrenIsMutable();
11941              children_.addAll(other.children_);
11942            }
11943            onChanged();
11944          }
11945          if (!other.refChildren_.isEmpty()) {
11946            if (refChildren_.isEmpty()) {
11947              refChildren_ = other.refChildren_;
11948              bitField0_ = (bitField0_ & ~0x00000004);
11949            } else {
11950              ensureRefChildrenIsMutable();
11951              refChildren_.addAll(other.refChildren_);
11952            }
11953            onChanged();
11954          }
11955          this.mergeUnknownFields(other.getUnknownFields());
11956          return this;
11957        }
11958
11959        public final boolean isInitialized() {
11960          return true;
11961        }
11962
11963        public Builder mergeFrom(
11964            com.google.protobuf.CodedInputStream input,
11965            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
11966            throws java.io.IOException {
11967          org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.DirEntry parsedMessage = null;
11968          try {
11969            parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
11970          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
11971            parsedMessage = (org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.DirEntry) e.getUnfinishedMessage();
11972            throw e;
11973          } finally {
11974            if (parsedMessage != null) {
11975              mergeFrom(parsedMessage);
11976            }
11977          }
11978          return this;
11979        }
11980        private int bitField0_;
11981
11982        // optional uint64 parent = 1;
11983        private long parent_ ;
11984        /**
11985         * <code>optional uint64 parent = 1;</code>
11986         */
11987        public boolean hasParent() {
11988          return ((bitField0_ & 0x00000001) == 0x00000001);
11989        }
11990        /**
11991         * <code>optional uint64 parent = 1;</code>
11992         */
11993        public long getParent() {
11994          return parent_;
11995        }
11996        /**
11997         * <code>optional uint64 parent = 1;</code>
11998         */
11999        public Builder setParent(long value) {
12000          bitField0_ |= 0x00000001;
12001          parent_ = value;
12002          onChanged();
12003          return this;
12004        }
12005        /**
12006         * <code>optional uint64 parent = 1;</code>
12007         */
12008        public Builder clearParent() {
12009          bitField0_ = (bitField0_ & ~0x00000001);
12010          parent_ = 0L;
12011          onChanged();
12012          return this;
12013        }
12014
12015        // repeated uint64 children = 2 [packed = true];
12016        private java.util.List<java.lang.Long> children_ = java.util.Collections.emptyList();
12017        private void ensureChildrenIsMutable() {
12018          if (!((bitField0_ & 0x00000002) == 0x00000002)) {
12019            children_ = new java.util.ArrayList<java.lang.Long>(children_);
12020            bitField0_ |= 0x00000002;
12021           }
12022        }
12023        /**
12024         * <code>repeated uint64 children = 2 [packed = true];</code>
12025         *
12026         * <pre>
12027         * children that are not reference nodes
12028         * </pre>
12029         */
12030        public java.util.List<java.lang.Long>
12031            getChildrenList() {
12032          return java.util.Collections.unmodifiableList(children_);
12033        }
12034        /**
12035         * <code>repeated uint64 children = 2 [packed = true];</code>
12036         *
12037         * <pre>
12038         * children that are not reference nodes
12039         * </pre>
12040         */
12041        public int getChildrenCount() {
12042          return children_.size();
12043        }
12044        /**
12045         * <code>repeated uint64 children = 2 [packed = true];</code>
12046         *
12047         * <pre>
12048         * children that are not reference nodes
12049         * </pre>
12050         */
12051        public long getChildren(int index) {
12052          return children_.get(index);
12053        }
12054        /**
12055         * <code>repeated uint64 children = 2 [packed = true];</code>
12056         *
12057         * <pre>
12058         * children that are not reference nodes
12059         * </pre>
12060         */
12061        public Builder setChildren(
12062            int index, long value) {
12063          ensureChildrenIsMutable();
12064          children_.set(index, value);
12065          onChanged();
12066          return this;
12067        }
12068        /**
12069         * <code>repeated uint64 children = 2 [packed = true];</code>
12070         *
12071         * <pre>
12072         * children that are not reference nodes
12073         * </pre>
12074         */
12075        public Builder addChildren(long value) {
12076          ensureChildrenIsMutable();
12077          children_.add(value);
12078          onChanged();
12079          return this;
12080        }
12081        /**
12082         * <code>repeated uint64 children = 2 [packed = true];</code>
12083         *
12084         * <pre>
12085         * children that are not reference nodes
12086         * </pre>
12087         */
12088        public Builder addAllChildren(
12089            java.lang.Iterable<? extends java.lang.Long> values) {
12090          ensureChildrenIsMutable();
12091          super.addAll(values, children_);
12092          onChanged();
12093          return this;
12094        }
12095        /**
12096         * <code>repeated uint64 children = 2 [packed = true];</code>
12097         *
12098         * <pre>
12099         * children that are not reference nodes
12100         * </pre>
12101         */
12102        public Builder clearChildren() {
12103          children_ = java.util.Collections.emptyList();
12104          bitField0_ = (bitField0_ & ~0x00000002);
12105          onChanged();
12106          return this;
12107        }
12108
12109        // repeated uint32 refChildren = 3 [packed = true];
12110        private java.util.List<java.lang.Integer> refChildren_ = java.util.Collections.emptyList();
12111        private void ensureRefChildrenIsMutable() {
12112          if (!((bitField0_ & 0x00000004) == 0x00000004)) {
12113            refChildren_ = new java.util.ArrayList<java.lang.Integer>(refChildren_);
12114            bitField0_ |= 0x00000004;
12115           }
12116        }
12117        /**
12118         * <code>repeated uint32 refChildren = 3 [packed = true];</code>
12119         *
12120         * <pre>
12121         * children that are reference nodes, each element is a reference node id
12122         * </pre>
12123         */
12124        public java.util.List<java.lang.Integer>
12125            getRefChildrenList() {
12126          return java.util.Collections.unmodifiableList(refChildren_);
12127        }
12128        /**
12129         * <code>repeated uint32 refChildren = 3 [packed = true];</code>
12130         *
12131         * <pre>
12132         * children that are reference nodes, each element is a reference node id
12133         * </pre>
12134         */
12135        public int getRefChildrenCount() {
12136          return refChildren_.size();
12137        }
12138        /**
12139         * <code>repeated uint32 refChildren = 3 [packed = true];</code>
12140         *
12141         * <pre>
12142         * children that are reference nodes, each element is a reference node id
12143         * </pre>
12144         */
12145        public int getRefChildren(int index) {
12146          return refChildren_.get(index);
12147        }
12148        /**
12149         * <code>repeated uint32 refChildren = 3 [packed = true];</code>
12150         *
12151         * <pre>
12152         * children that are reference nodes, each element is a reference node id
12153         * </pre>
12154         */
12155        public Builder setRefChildren(
12156            int index, int value) {
12157          ensureRefChildrenIsMutable();
12158          refChildren_.set(index, value);
12159          onChanged();
12160          return this;
12161        }
12162        /**
12163         * <code>repeated uint32 refChildren = 3 [packed = true];</code>
12164         *
12165         * <pre>
12166         * children that are reference nodes, each element is a reference node id
12167         * </pre>
12168         */
12169        public Builder addRefChildren(int value) {
12170          ensureRefChildrenIsMutable();
12171          refChildren_.add(value);
12172          onChanged();
12173          return this;
12174        }
12175        /**
12176         * <code>repeated uint32 refChildren = 3 [packed = true];</code>
12177         *
12178         * <pre>
12179         * children that are reference nodes, each element is a reference node id
12180         * </pre>
12181         */
12182        public Builder addAllRefChildren(
12183            java.lang.Iterable<? extends java.lang.Integer> values) {
12184          ensureRefChildrenIsMutable();
12185          super.addAll(values, refChildren_);
12186          onChanged();
12187          return this;
12188        }
12189        /**
12190         * <code>repeated uint32 refChildren = 3 [packed = true];</code>
12191         *
12192         * <pre>
12193         * children that are reference nodes, each element is a reference node id
12194         * </pre>
12195         */
12196        public Builder clearRefChildren() {
12197          refChildren_ = java.util.Collections.emptyList();
12198          bitField0_ = (bitField0_ & ~0x00000004);
12199          onChanged();
12200          return this;
12201        }
12202
12203        // @@protoc_insertion_point(builder_scope:hadoop.hdfs.fsimage.INodeDirectorySection.DirEntry)
12204      }
12205
12206      static {
12207        defaultInstance = new DirEntry(true);
12208        defaultInstance.initFields();
12209      }
12210
12211      // @@protoc_insertion_point(class_scope:hadoop.hdfs.fsimage.INodeDirectorySection.DirEntry)
12212    }
12213
12214    private void initFields() {
12215    }
12216    private byte memoizedIsInitialized = -1;
12217    public final boolean isInitialized() {
12218      byte isInitialized = memoizedIsInitialized;
12219      if (isInitialized != -1) return isInitialized == 1;
12220
12221      memoizedIsInitialized = 1;
12222      return true;
12223    }
12224
12225    public void writeTo(com.google.protobuf.CodedOutputStream output)
12226                        throws java.io.IOException {
12227      getSerializedSize();
12228      getUnknownFields().writeTo(output);
12229    }
12230
12231    private int memoizedSerializedSize = -1;
12232    public int getSerializedSize() {
12233      int size = memoizedSerializedSize;
12234      if (size != -1) return size;
12235
12236      size = 0;
12237      size += getUnknownFields().getSerializedSize();
12238      memoizedSerializedSize = size;
12239      return size;
12240    }
12241
12242    private static final long serialVersionUID = 0L;
12243    @java.lang.Override
12244    protected java.lang.Object writeReplace()
12245        throws java.io.ObjectStreamException {
12246      return super.writeReplace();
12247    }
12248
12249    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection parseFrom(
12250        com.google.protobuf.ByteString data)
12251        throws com.google.protobuf.InvalidProtocolBufferException {
12252      return PARSER.parseFrom(data);
12253    }
12254    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection parseFrom(
12255        com.google.protobuf.ByteString data,
12256        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
12257        throws com.google.protobuf.InvalidProtocolBufferException {
12258      return PARSER.parseFrom(data, extensionRegistry);
12259    }
12260    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection parseFrom(byte[] data)
12261        throws com.google.protobuf.InvalidProtocolBufferException {
12262      return PARSER.parseFrom(data);
12263    }
12264    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection parseFrom(
12265        byte[] data,
12266        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
12267        throws com.google.protobuf.InvalidProtocolBufferException {
12268      return PARSER.parseFrom(data, extensionRegistry);
12269    }
12270    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection parseFrom(java.io.InputStream input)
12271        throws java.io.IOException {
12272      return PARSER.parseFrom(input);
12273    }
12274    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection parseFrom(
12275        java.io.InputStream input,
12276        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
12277        throws java.io.IOException {
12278      return PARSER.parseFrom(input, extensionRegistry);
12279    }
12280    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection parseDelimitedFrom(java.io.InputStream input)
12281        throws java.io.IOException {
12282      return PARSER.parseDelimitedFrom(input);
12283    }
12284    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection parseDelimitedFrom(
12285        java.io.InputStream input,
12286        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
12287        throws java.io.IOException {
12288      return PARSER.parseDelimitedFrom(input, extensionRegistry);
12289    }
12290    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection parseFrom(
12291        com.google.protobuf.CodedInputStream input)
12292        throws java.io.IOException {
12293      return PARSER.parseFrom(input);
12294    }
12295    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection parseFrom(
12296        com.google.protobuf.CodedInputStream input,
12297        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
12298        throws java.io.IOException {
12299      return PARSER.parseFrom(input, extensionRegistry);
12300    }
12301
12302    public static Builder newBuilder() { return Builder.create(); }
12303    public Builder newBuilderForType() { return newBuilder(); }
12304    public static Builder newBuilder(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection prototype) {
12305      return newBuilder().mergeFrom(prototype);
12306    }
12307    public Builder toBuilder() { return newBuilder(this); }
12308
12309    @java.lang.Override
12310    protected Builder newBuilderForType(
12311        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
12312      Builder builder = new Builder(parent);
12313      return builder;
12314    }
12315    /**
12316     * Protobuf type {@code hadoop.hdfs.fsimage.INodeDirectorySection}
12317     *
12318     * <pre>
12319     **
12320     * This section records the children of each directories
12321     * NAME: INODE_DIR
12322     * </pre>
12323     */
12324    public static final class Builder extends
12325        com.google.protobuf.GeneratedMessage.Builder<Builder>
12326       implements org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySectionOrBuilder {
12327      public static final com.google.protobuf.Descriptors.Descriptor
12328          getDescriptor() {
12329        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeDirectorySection_descriptor;
12330      }
12331
12332      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
12333          internalGetFieldAccessorTable() {
12334        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeDirectorySection_fieldAccessorTable
12335            .ensureFieldAccessorsInitialized(
12336                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.Builder.class);
12337      }
12338
12339      // Construct using org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.newBuilder()
12340      private Builder() {
12341        maybeForceBuilderInitialization();
12342      }
12343
12344      private Builder(
12345          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
12346        super(parent);
12347        maybeForceBuilderInitialization();
12348      }
12349      private void maybeForceBuilderInitialization() {
12350        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
12351        }
12352      }
12353      private static Builder create() {
12354        return new Builder();
12355      }
12356
12357      public Builder clear() {
12358        super.clear();
12359        return this;
12360      }
12361
12362      public Builder clone() {
12363        return create().mergeFrom(buildPartial());
12364      }
12365
12366      public com.google.protobuf.Descriptors.Descriptor
12367          getDescriptorForType() {
12368        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeDirectorySection_descriptor;
12369      }
12370
12371      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection getDefaultInstanceForType() {
12372        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.getDefaultInstance();
12373      }
12374
12375      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection build() {
12376        org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection result = buildPartial();
12377        if (!result.isInitialized()) {
12378          throw newUninitializedMessageException(result);
12379        }
12380        return result;
12381      }
12382
12383      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection buildPartial() {
12384        org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection result = new org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection(this);
12385        onBuilt();
12386        return result;
12387      }
12388
12389      public Builder mergeFrom(com.google.protobuf.Message other) {
12390        if (other instanceof org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection) {
12391          return mergeFrom((org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection)other);
12392        } else {
12393          super.mergeFrom(other);
12394          return this;
12395        }
12396      }
12397
12398      public Builder mergeFrom(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection other) {
12399        if (other == org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.getDefaultInstance()) return this;
12400        this.mergeUnknownFields(other.getUnknownFields());
12401        return this;
12402      }
12403
12404      public final boolean isInitialized() {
12405        return true;
12406      }
12407
12408      public Builder mergeFrom(
12409          com.google.protobuf.CodedInputStream input,
12410          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
12411          throws java.io.IOException {
12412        org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection parsedMessage = null;
12413        try {
12414          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
12415        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
12416          parsedMessage = (org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection) e.getUnfinishedMessage();
12417          throw e;
12418        } finally {
12419          if (parsedMessage != null) {
12420            mergeFrom(parsedMessage);
12421          }
12422        }
12423        return this;
12424      }
12425
12426      // @@protoc_insertion_point(builder_scope:hadoop.hdfs.fsimage.INodeDirectorySection)
12427    }
12428
12429    static {
12430      defaultInstance = new INodeDirectorySection(true);
12431      defaultInstance.initFields();
12432    }
12433
12434    // @@protoc_insertion_point(class_scope:hadoop.hdfs.fsimage.INodeDirectorySection)
12435  }
12436
12437  public interface INodeReferenceSectionOrBuilder
12438      extends com.google.protobuf.MessageOrBuilder {
12439  }
12440  /**
12441   * Protobuf type {@code hadoop.hdfs.fsimage.INodeReferenceSection}
12442   */
12443  public static final class INodeReferenceSection extends
12444      com.google.protobuf.GeneratedMessage
12445      implements INodeReferenceSectionOrBuilder {
12446    // Use INodeReferenceSection.newBuilder() to construct.
12447    private INodeReferenceSection(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
12448      super(builder);
12449      this.unknownFields = builder.getUnknownFields();
12450    }
12451    private INodeReferenceSection(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
12452
12453    private static final INodeReferenceSection defaultInstance;
12454    public static INodeReferenceSection getDefaultInstance() {
12455      return defaultInstance;
12456    }
12457
12458    public INodeReferenceSection getDefaultInstanceForType() {
12459      return defaultInstance;
12460    }
12461
12462    private final com.google.protobuf.UnknownFieldSet unknownFields;
12463    @java.lang.Override
12464    public final com.google.protobuf.UnknownFieldSet
12465        getUnknownFields() {
12466      return this.unknownFields;
12467    }
12468    private INodeReferenceSection(
12469        com.google.protobuf.CodedInputStream input,
12470        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
12471        throws com.google.protobuf.InvalidProtocolBufferException {
12472      initFields();
12473      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
12474          com.google.protobuf.UnknownFieldSet.newBuilder();
12475      try {
12476        boolean done = false;
12477        while (!done) {
12478          int tag = input.readTag();
12479          switch (tag) {
12480            case 0:
12481              done = true;
12482              break;
12483            default: {
12484              if (!parseUnknownField(input, unknownFields,
12485                                     extensionRegistry, tag)) {
12486                done = true;
12487              }
12488              break;
12489            }
12490          }
12491        }
12492      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
12493        throw e.setUnfinishedMessage(this);
12494      } catch (java.io.IOException e) {
12495        throw new com.google.protobuf.InvalidProtocolBufferException(
12496            e.getMessage()).setUnfinishedMessage(this);
12497      } finally {
12498        this.unknownFields = unknownFields.build();
12499        makeExtensionsImmutable();
12500      }
12501    }
12502    public static final com.google.protobuf.Descriptors.Descriptor
12503        getDescriptor() {
12504      return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeReferenceSection_descriptor;
12505    }
12506
12507    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
12508        internalGetFieldAccessorTable() {
12509      return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeReferenceSection_fieldAccessorTable
12510          .ensureFieldAccessorsInitialized(
12511              org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.Builder.class);
12512    }
12513
12514    public static com.google.protobuf.Parser<INodeReferenceSection> PARSER =
12515        new com.google.protobuf.AbstractParser<INodeReferenceSection>() {
12516      public INodeReferenceSection parsePartialFrom(
12517          com.google.protobuf.CodedInputStream input,
12518          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
12519          throws com.google.protobuf.InvalidProtocolBufferException {
12520        return new INodeReferenceSection(input, extensionRegistry);
12521      }
12522    };
12523
12524    @java.lang.Override
12525    public com.google.protobuf.Parser<INodeReferenceSection> getParserForType() {
12526      return PARSER;
12527    }
12528
12529    public interface INodeReferenceOrBuilder
12530        extends com.google.protobuf.MessageOrBuilder {
12531
12532      // optional uint64 referredId = 1;
12533      /**
12534       * <code>optional uint64 referredId = 1;</code>
12535       *
12536       * <pre>
12537       * id of the referred inode
12538       * </pre>
12539       */
12540      boolean hasReferredId();
12541      /**
12542       * <code>optional uint64 referredId = 1;</code>
12543       *
12544       * <pre>
12545       * id of the referred inode
12546       * </pre>
12547       */
12548      long getReferredId();
12549
12550      // optional bytes name = 2;
12551      /**
12552       * <code>optional bytes name = 2;</code>
12553       *
12554       * <pre>
12555       * local name recorded in WithName
12556       * </pre>
12557       */
12558      boolean hasName();
12559      /**
12560       * <code>optional bytes name = 2;</code>
12561       *
12562       * <pre>
12563       * local name recorded in WithName
12564       * </pre>
12565       */
12566      com.google.protobuf.ByteString getName();
12567
12568      // optional uint32 dstSnapshotId = 3;
12569      /**
12570       * <code>optional uint32 dstSnapshotId = 3;</code>
12571       *
12572       * <pre>
12573       * recorded in DstReference
12574       * </pre>
12575       */
12576      boolean hasDstSnapshotId();
12577      /**
12578       * <code>optional uint32 dstSnapshotId = 3;</code>
12579       *
12580       * <pre>
12581       * recorded in DstReference
12582       * </pre>
12583       */
12584      int getDstSnapshotId();
12585
12586      // optional uint32 lastSnapshotId = 4;
12587      /**
12588       * <code>optional uint32 lastSnapshotId = 4;</code>
12589       *
12590       * <pre>
12591       * recorded in WithName
12592       * </pre>
12593       */
12594      boolean hasLastSnapshotId();
12595      /**
12596       * <code>optional uint32 lastSnapshotId = 4;</code>
12597       *
12598       * <pre>
12599       * recorded in WithName
12600       * </pre>
12601       */
12602      int getLastSnapshotId();
12603    }
12604    /**
12605     * Protobuf type {@code hadoop.hdfs.fsimage.INodeReferenceSection.INodeReference}
12606     */
12607    public static final class INodeReference extends
12608        com.google.protobuf.GeneratedMessage
12609        implements INodeReferenceOrBuilder {
12610      // Use INodeReference.newBuilder() to construct.
12611      private INodeReference(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
12612        super(builder);
12613        this.unknownFields = builder.getUnknownFields();
12614      }
12615      private INodeReference(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
12616
12617      private static final INodeReference defaultInstance;
12618      public static INodeReference getDefaultInstance() {
12619        return defaultInstance;
12620      }
12621
12622      public INodeReference getDefaultInstanceForType() {
12623        return defaultInstance;
12624      }
12625
12626      private final com.google.protobuf.UnknownFieldSet unknownFields;
12627      @java.lang.Override
12628      public final com.google.protobuf.UnknownFieldSet
12629          getUnknownFields() {
12630        return this.unknownFields;
12631      }
12632      private INodeReference(
12633          com.google.protobuf.CodedInputStream input,
12634          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
12635          throws com.google.protobuf.InvalidProtocolBufferException {
12636        initFields();
12638        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
12639            com.google.protobuf.UnknownFieldSet.newBuilder();
12640        try {
12641          boolean done = false;
12642          while (!done) {
12643            int tag = input.readTag();
12644            switch (tag) {
12645              case 0:
12646                done = true;
12647                break;
              case 8: {
                bitField0_ |= 0x00000001;
                referredId_ = input.readUInt64();
                break;
              }
              case 18: {
                bitField0_ |= 0x00000002;
                name_ = input.readBytes();
                break;
              }
              case 24: {
                bitField0_ |= 0x00000004;
                dstSnapshotId_ = input.readUInt32();
                break;
              }
              case 32: {
                bitField0_ |= 0x00000008;
                lastSnapshotId_ = input.readUInt32();
                break;
              }
              default: {
                if (!parseUnknownField(input, unknownFields,
                                       extensionRegistry, tag)) {
                  done = true;
                }
                break;
              }
12675            }
12676          }
12677        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
12678          throw e.setUnfinishedMessage(this);
12679        } catch (java.io.IOException e) {
12680          throw new com.google.protobuf.InvalidProtocolBufferException(
12681              e.getMessage()).setUnfinishedMessage(this);
12682        } finally {
12683          this.unknownFields = unknownFields.build();
12684          makeExtensionsImmutable();
12685        }
12686      }
12687      public static final com.google.protobuf.Descriptors.Descriptor
12688          getDescriptor() {
12689        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeReferenceSection_INodeReference_descriptor;
12690      }
12691
12692      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
12693          internalGetFieldAccessorTable() {
12694        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeReferenceSection_INodeReference_fieldAccessorTable
12695            .ensureFieldAccessorsInitialized(
12696                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.INodeReference.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.INodeReference.Builder.class);
12697      }
12698
12699      public static com.google.protobuf.Parser<INodeReference> PARSER =
12700          new com.google.protobuf.AbstractParser<INodeReference>() {
12701        public INodeReference parsePartialFrom(
12702            com.google.protobuf.CodedInputStream input,
12703            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
12704            throws com.google.protobuf.InvalidProtocolBufferException {
12705          return new INodeReference(input, extensionRegistry);
12706        }
12707      };
12708
12709      @java.lang.Override
12710      public com.google.protobuf.Parser<INodeReference> getParserForType() {
12711        return PARSER;
12712      }
12713
12714      private int bitField0_;
12715      // optional uint64 referredId = 1;
12716      public static final int REFERREDID_FIELD_NUMBER = 1;
12717      private long referredId_;
12718      /**
12719       * <code>optional uint64 referredId = 1;</code>
12720       *
12721       * <pre>
12722       * id of the referred inode
12723       * </pre>
12724       */
12725      public boolean hasReferredId() {
12726        return ((bitField0_ & 0x00000001) == 0x00000001);
12727      }
12728      /**
12729       * <code>optional uint64 referredId = 1;</code>
12730       *
12731       * <pre>
12732       * id of the referred inode
12733       * </pre>
12734       */
12735      public long getReferredId() {
12736        return referredId_;
12737      }
12738
12739      // optional bytes name = 2;
12740      public static final int NAME_FIELD_NUMBER = 2;
12741      private com.google.protobuf.ByteString name_;
12742      /**
12743       * <code>optional bytes name = 2;</code>
12744       *
12745       * <pre>
12746       * local name recorded in WithName
12747       * </pre>
12748       */
12749      public boolean hasName() {
12750        return ((bitField0_ & 0x00000002) == 0x00000002);
12751      }
12752      /**
12753       * <code>optional bytes name = 2;</code>
12754       *
12755       * <pre>
12756       * local name recorded in WithName
12757       * </pre>
12758       */
12759      public com.google.protobuf.ByteString getName() {
12760        return name_;
12761      }
12762
12763      // optional uint32 dstSnapshotId = 3;
12764      public static final int DSTSNAPSHOTID_FIELD_NUMBER = 3;
12765      private int dstSnapshotId_;
12766      /**
12767       * <code>optional uint32 dstSnapshotId = 3;</code>
12768       *
12769       * <pre>
12770       * recorded in DstReference
12771       * </pre>
12772       */
12773      public boolean hasDstSnapshotId() {
12774        return ((bitField0_ & 0x00000004) == 0x00000004);
12775      }
12776      /**
12777       * <code>optional uint32 dstSnapshotId = 3;</code>
12778       *
12779       * <pre>
12780       * recorded in DstReference
12781       * </pre>
12782       */
12783      public int getDstSnapshotId() {
12784        return dstSnapshotId_;
12785      }
12786
12787      // optional uint32 lastSnapshotId = 4;
12788      public static final int LASTSNAPSHOTID_FIELD_NUMBER = 4;
12789      private int lastSnapshotId_;
12790      /**
12791       * <code>optional uint32 lastSnapshotId = 4;</code>
12792       *
12793       * <pre>
12794       * recorded in WithName
12795       * </pre>
12796       */
12797      public boolean hasLastSnapshotId() {
12798        return ((bitField0_ & 0x00000008) == 0x00000008);
12799      }
12800      /**
12801       * <code>optional uint32 lastSnapshotId = 4;</code>
12802       *
12803       * <pre>
12804       * recorded in WithName
12805       * </pre>
12806       */
12807      public int getLastSnapshotId() {
12808        return lastSnapshotId_;
12809      }
12810
12811      private void initFields() {
12812        referredId_ = 0L;
12813        name_ = com.google.protobuf.ByteString.EMPTY;
12814        dstSnapshotId_ = 0;
12815        lastSnapshotId_ = 0;
12816      }
12817      private byte memoizedIsInitialized = -1;
12818      public final boolean isInitialized() {
12819        byte isInitialized = memoizedIsInitialized;
12820        if (isInitialized != -1) return isInitialized == 1;
12821
12822        memoizedIsInitialized = 1;
12823        return true;
12824      }
12825
12826      public void writeTo(com.google.protobuf.CodedOutputStream output)
12827                          throws java.io.IOException {
12828        getSerializedSize();
12829        if (((bitField0_ & 0x00000001) == 0x00000001)) {
12830          output.writeUInt64(1, referredId_);
12831        }
12832        if (((bitField0_ & 0x00000002) == 0x00000002)) {
12833          output.writeBytes(2, name_);
12834        }
12835        if (((bitField0_ & 0x00000004) == 0x00000004)) {
12836          output.writeUInt32(3, dstSnapshotId_);
12837        }
12838        if (((bitField0_ & 0x00000008) == 0x00000008)) {
12839          output.writeUInt32(4, lastSnapshotId_);
12840        }
12841        getUnknownFields().writeTo(output);
12842      }
12843
12844      private int memoizedSerializedSize = -1;
12845      public int getSerializedSize() {
12846        int size = memoizedSerializedSize;
12847        if (size != -1) return size;
12848
12849        size = 0;
12850        if (((bitField0_ & 0x00000001) == 0x00000001)) {
12851          size += com.google.protobuf.CodedOutputStream
12852            .computeUInt64Size(1, referredId_);
12853        }
12854        if (((bitField0_ & 0x00000002) == 0x00000002)) {
12855          size += com.google.protobuf.CodedOutputStream
12856            .computeBytesSize(2, name_);
12857        }
12858        if (((bitField0_ & 0x00000004) == 0x00000004)) {
12859          size += com.google.protobuf.CodedOutputStream
12860            .computeUInt32Size(3, dstSnapshotId_);
12861        }
12862        if (((bitField0_ & 0x00000008) == 0x00000008)) {
12863          size += com.google.protobuf.CodedOutputStream
12864            .computeUInt32Size(4, lastSnapshotId_);
12865        }
12866        size += getUnknownFields().getSerializedSize();
12867        memoizedSerializedSize = size;
12868        return size;
12869      }
12870
12871      private static final long serialVersionUID = 0L;
12872      @java.lang.Override
12873      protected java.lang.Object writeReplace()
12874          throws java.io.ObjectStreamException {
12875        return super.writeReplace();
12876      }
12877
12878      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.INodeReference parseFrom(
12879          com.google.protobuf.ByteString data)
12880          throws com.google.protobuf.InvalidProtocolBufferException {
12881        return PARSER.parseFrom(data);
12882      }
12883      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.INodeReference parseFrom(
12884          com.google.protobuf.ByteString data,
12885          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
12886          throws com.google.protobuf.InvalidProtocolBufferException {
12887        return PARSER.parseFrom(data, extensionRegistry);
12888      }
12889      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.INodeReference parseFrom(byte[] data)
12890          throws com.google.protobuf.InvalidProtocolBufferException {
12891        return PARSER.parseFrom(data);
12892      }
12893      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.INodeReference parseFrom(
12894          byte[] data,
12895          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
12896          throws com.google.protobuf.InvalidProtocolBufferException {
12897        return PARSER.parseFrom(data, extensionRegistry);
12898      }
12899      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.INodeReference parseFrom(java.io.InputStream input)
12900          throws java.io.IOException {
12901        return PARSER.parseFrom(input);
12902      }
12903      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.INodeReference parseFrom(
12904          java.io.InputStream input,
12905          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
12906          throws java.io.IOException {
12907        return PARSER.parseFrom(input, extensionRegistry);
12908      }
12909      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.INodeReference parseDelimitedFrom(java.io.InputStream input)
12910          throws java.io.IOException {
12911        return PARSER.parseDelimitedFrom(input);
12912      }
12913      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.INodeReference parseDelimitedFrom(
12914          java.io.InputStream input,
12915          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
12916          throws java.io.IOException {
12917        return PARSER.parseDelimitedFrom(input, extensionRegistry);
12918      }
12919      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.INodeReference parseFrom(
12920          com.google.protobuf.CodedInputStream input)
12921          throws java.io.IOException {
12922        return PARSER.parseFrom(input);
12923      }
12924      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.INodeReference parseFrom(
12925          com.google.protobuf.CodedInputStream input,
12926          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
12927          throws java.io.IOException {
12928        return PARSER.parseFrom(input, extensionRegistry);
12929      }
12930
12931      public static Builder newBuilder() { return Builder.create(); }
12932      public Builder newBuilderForType() { return newBuilder(); }
12933      public static Builder newBuilder(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.INodeReference prototype) {
12934        return newBuilder().mergeFrom(prototype);
12935      }
12936      public Builder toBuilder() { return newBuilder(this); }
12937
12938      @java.lang.Override
12939      protected Builder newBuilderForType(
12940          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
12941        Builder builder = new Builder(parent);
12942        return builder;
12943      }
12944      /**
12945       * Protobuf type {@code hadoop.hdfs.fsimage.INodeReferenceSection.INodeReference}
12946       */
12947      public static final class Builder extends
12948          com.google.protobuf.GeneratedMessage.Builder<Builder>
12949         implements org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.INodeReferenceOrBuilder {
12950        public static final com.google.protobuf.Descriptors.Descriptor
12951            getDescriptor() {
12952          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeReferenceSection_INodeReference_descriptor;
12953        }
12954
12955        protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
12956            internalGetFieldAccessorTable() {
12957          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeReferenceSection_INodeReference_fieldAccessorTable
12958              .ensureFieldAccessorsInitialized(
12959                  org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.INodeReference.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.INodeReference.Builder.class);
12960        }
12961
12962        // Construct using org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.INodeReference.newBuilder()
12963        private Builder() {
12964          maybeForceBuilderInitialization();
12965        }
12966
12967        private Builder(
12968            com.google.protobuf.GeneratedMessage.BuilderParent parent) {
12969          super(parent);
12970          maybeForceBuilderInitialization();
12971        }
12972        private void maybeForceBuilderInitialization() {
12973          if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
12974          }
12975        }
12976        private static Builder create() {
12977          return new Builder();
12978        }
12979
12980        public Builder clear() {
12981          super.clear();
12982          referredId_ = 0L;
12983          bitField0_ = (bitField0_ & ~0x00000001);
12984          name_ = com.google.protobuf.ByteString.EMPTY;
12985          bitField0_ = (bitField0_ & ~0x00000002);
12986          dstSnapshotId_ = 0;
12987          bitField0_ = (bitField0_ & ~0x00000004);
12988          lastSnapshotId_ = 0;
12989          bitField0_ = (bitField0_ & ~0x00000008);
12990          return this;
12991        }
12992
12993        public Builder clone() {
12994          return create().mergeFrom(buildPartial());
12995        }
12996
12997        public com.google.protobuf.Descriptors.Descriptor
12998            getDescriptorForType() {
12999          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeReferenceSection_INodeReference_descriptor;
13000        }
13001
13002        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.INodeReference getDefaultInstanceForType() {
13003          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.INodeReference.getDefaultInstance();
13004        }
13005
13006        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.INodeReference build() {
13007          org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.INodeReference result = buildPartial();
13008          if (!result.isInitialized()) {
13009            throw newUninitializedMessageException(result);
13010          }
13011          return result;
13012        }
13013
13014        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.INodeReference buildPartial() {
13015          org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.INodeReference result = new org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.INodeReference(this);
13016          int from_bitField0_ = bitField0_;
13017          int to_bitField0_ = 0;
13018          if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
13019            to_bitField0_ |= 0x00000001;
13020          }
13021          result.referredId_ = referredId_;
13022          if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
13023            to_bitField0_ |= 0x00000002;
13024          }
13025          result.name_ = name_;
13026          if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
13027            to_bitField0_ |= 0x00000004;
13028          }
13029          result.dstSnapshotId_ = dstSnapshotId_;
13030          if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
13031            to_bitField0_ |= 0x00000008;
13032          }
13033          result.lastSnapshotId_ = lastSnapshotId_;
13034          result.bitField0_ = to_bitField0_;
13035          onBuilt();
13036          return result;
13037        }
13038
13039        public Builder mergeFrom(com.google.protobuf.Message other) {
13040          if (other instanceof org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.INodeReference) {
13041            return mergeFrom((org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.INodeReference)other);
13042          } else {
13043            super.mergeFrom(other);
13044            return this;
13045          }
13046        }
13047
13048        public Builder mergeFrom(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.INodeReference other) {
13049          if (other == org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.INodeReference.getDefaultInstance()) return this;
13050          if (other.hasReferredId()) {
13051            setReferredId(other.getReferredId());
13052          }
13053          if (other.hasName()) {
13054            setName(other.getName());
13055          }
13056          if (other.hasDstSnapshotId()) {
13057            setDstSnapshotId(other.getDstSnapshotId());
13058          }
13059          if (other.hasLastSnapshotId()) {
13060            setLastSnapshotId(other.getLastSnapshotId());
13061          }
13062          this.mergeUnknownFields(other.getUnknownFields());
13063          return this;
13064        }
13065
13066        public final boolean isInitialized() {
13067          return true;
13068        }
13069
13070        public Builder mergeFrom(
13071            com.google.protobuf.CodedInputStream input,
13072            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
13073            throws java.io.IOException {
13074          org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.INodeReference parsedMessage = null;
13075          try {
13076            parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
13077          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
13078            parsedMessage = (org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.INodeReference) e.getUnfinishedMessage();
13079            throw e;
13080          } finally {
13081            if (parsedMessage != null) {
13082              mergeFrom(parsedMessage);
13083            }
13084          }
13085          return this;
13086        }
13087        private int bitField0_;
13088
13089        // optional uint64 referredId = 1;
13090        private long referredId_ ;
13091        /**
13092         * <code>optional uint64 referredId = 1;</code>
13093         *
13094         * <pre>
13095         * id of the referred inode
13096         * </pre>
13097         */
13098        public boolean hasReferredId() {
13099          return ((bitField0_ & 0x00000001) == 0x00000001);
13100        }
13101        /**
13102         * <code>optional uint64 referredId = 1;</code>
13103         *
13104         * <pre>
13105         * id of the referred inode
13106         * </pre>
13107         */
13108        public long getReferredId() {
13109          return referredId_;
13110        }
13111        /**
13112         * <code>optional uint64 referredId = 1;</code>
13113         *
13114         * <pre>
13115         * id of the referred inode
13116         * </pre>
13117         */
13118        public Builder setReferredId(long value) {
13119          bitField0_ |= 0x00000001;
13120          referredId_ = value;
13121          onChanged();
13122          return this;
13123        }
13124        /**
13125         * <code>optional uint64 referredId = 1;</code>
13126         *
13127         * <pre>
13128         * id of the referred inode
13129         * </pre>
13130         */
13131        public Builder clearReferredId() {
13132          bitField0_ = (bitField0_ & ~0x00000001);
13133          referredId_ = 0L;
13134          onChanged();
13135          return this;
13136        }
13137
13138        // optional bytes name = 2;
13139        private com.google.protobuf.ByteString name_ = com.google.protobuf.ByteString.EMPTY;
13140        /**
13141         * <code>optional bytes name = 2;</code>
13142         *
13143         * <pre>
13144         * local name recorded in WithName
13145         * </pre>
13146         */
13147        public boolean hasName() {
13148          return ((bitField0_ & 0x00000002) == 0x00000002);
13149        }
13150        /**
13151         * <code>optional bytes name = 2;</code>
13152         *
13153         * <pre>
13154         * local name recorded in WithName
13155         * </pre>
13156         */
13157        public com.google.protobuf.ByteString getName() {
13158          return name_;
13159        }
13160        /**
13161         * <code>optional bytes name = 2;</code>
13162         *
13163         * <pre>
13164         * local name recorded in WithName
13165         * </pre>
13166         */
13167        public Builder setName(com.google.protobuf.ByteString value) {
          if (value == null) {
            throw new NullPointerException();
          }
          bitField0_ |= 0x00000002;
13172          name_ = value;
13173          onChanged();
13174          return this;
13175        }
13176        /**
13177         * <code>optional bytes name = 2;</code>
13178         *
13179         * <pre>
13180         * local name recorded in WithName
13181         * </pre>
13182         */
13183        public Builder clearName() {
13184          bitField0_ = (bitField0_ & ~0x00000002);
13185          name_ = getDefaultInstance().getName();
13186          onChanged();
13187          return this;
13188        }
13189
13190        // optional uint32 dstSnapshotId = 3;
13191        private int dstSnapshotId_ ;
13192        /**
13193         * <code>optional uint32 dstSnapshotId = 3;</code>
13194         *
13195         * <pre>
13196         * recorded in DstReference
13197         * </pre>
13198         */
13199        public boolean hasDstSnapshotId() {
13200          return ((bitField0_ & 0x00000004) == 0x00000004);
13201        }
13202        /**
13203         * <code>optional uint32 dstSnapshotId = 3;</code>
13204         *
13205         * <pre>
13206         * recorded in DstReference
13207         * </pre>
13208         */
13209        public int getDstSnapshotId() {
13210          return dstSnapshotId_;
13211        }
13212        /**
13213         * <code>optional uint32 dstSnapshotId = 3;</code>
13214         *
13215         * <pre>
13216         * recorded in DstReference
13217         * </pre>
13218         */
13219        public Builder setDstSnapshotId(int value) {
13220          bitField0_ |= 0x00000004;
13221          dstSnapshotId_ = value;
13222          onChanged();
13223          return this;
13224        }
13225        /**
13226         * <code>optional uint32 dstSnapshotId = 3;</code>
13227         *
13228         * <pre>
13229         * recorded in DstReference
13230         * </pre>
13231         */
13232        public Builder clearDstSnapshotId() {
13233          bitField0_ = (bitField0_ & ~0x00000004);
13234          dstSnapshotId_ = 0;
13235          onChanged();
13236          return this;
13237        }
13238
13239        // optional uint32 lastSnapshotId = 4;
13240        private int lastSnapshotId_ ;
13241        /**
13242         * <code>optional uint32 lastSnapshotId = 4;</code>
13243         *
13244         * <pre>
13245         * recorded in WithName
13246         * </pre>
13247         */
13248        public boolean hasLastSnapshotId() {
13249          return ((bitField0_ & 0x00000008) == 0x00000008);
13250        }
13251        /**
13252         * <code>optional uint32 lastSnapshotId = 4;</code>
13253         *
13254         * <pre>
13255         * recorded in WithName
13256         * </pre>
13257         */
13258        public int getLastSnapshotId() {
13259          return lastSnapshotId_;
13260        }
13261        /**
13262         * <code>optional uint32 lastSnapshotId = 4;</code>
13263         *
13264         * <pre>
13265         * recorded in WithName
13266         * </pre>
13267         */
13268        public Builder setLastSnapshotId(int value) {
13269          bitField0_ |= 0x00000008;
13270          lastSnapshotId_ = value;
13271          onChanged();
13272          return this;
13273        }
13274        /**
13275         * <code>optional uint32 lastSnapshotId = 4;</code>
13276         *
13277         * <pre>
13278         * recorded in WithName
13279         * </pre>
13280         */
13281        public Builder clearLastSnapshotId() {
13282          bitField0_ = (bitField0_ & ~0x00000008);
13283          lastSnapshotId_ = 0;
13284          onChanged();
13285          return this;
13286        }
13287
13288        // @@protoc_insertion_point(builder_scope:hadoop.hdfs.fsimage.INodeReferenceSection.INodeReference)
13289      }
13290
13291      static {
13292        defaultInstance = new INodeReference(true);
13293        defaultInstance.initFields();
13294      }
13295
13296      // @@protoc_insertion_point(class_scope:hadoop.hdfs.fsimage.INodeReferenceSection.INodeReference)
13297    }
13298
13299    private void initFields() {
13300    }
13301    private byte memoizedIsInitialized = -1;
13302    public final boolean isInitialized() {
13303      byte isInitialized = memoizedIsInitialized;
13304      if (isInitialized != -1) return isInitialized == 1;
13305
13306      memoizedIsInitialized = 1;
13307      return true;
13308    }
13309
13310    public void writeTo(com.google.protobuf.CodedOutputStream output)
13311                        throws java.io.IOException {
13312      getSerializedSize();
13313      getUnknownFields().writeTo(output);
13314    }
13315
13316    private int memoizedSerializedSize = -1;
13317    public int getSerializedSize() {
13318      int size = memoizedSerializedSize;
13319      if (size != -1) return size;
13320
13321      size = 0;
13322      size += getUnknownFields().getSerializedSize();
13323      memoizedSerializedSize = size;
13324      return size;
13325    }
13326
13327    private static final long serialVersionUID = 0L;
13328    @java.lang.Override
13329    protected java.lang.Object writeReplace()
13330        throws java.io.ObjectStreamException {
13331      return super.writeReplace();
13332    }
13333
13334    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection parseFrom(
13335        com.google.protobuf.ByteString data)
13336        throws com.google.protobuf.InvalidProtocolBufferException {
13337      return PARSER.parseFrom(data);
13338    }
13339    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection parseFrom(
13340        com.google.protobuf.ByteString data,
13341        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
13342        throws com.google.protobuf.InvalidProtocolBufferException {
13343      return PARSER.parseFrom(data, extensionRegistry);
13344    }
13345    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection parseFrom(byte[] data)
13346        throws com.google.protobuf.InvalidProtocolBufferException {
13347      return PARSER.parseFrom(data);
13348    }
13349    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection parseFrom(
13350        byte[] data,
13351        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
13352        throws com.google.protobuf.InvalidProtocolBufferException {
13353      return PARSER.parseFrom(data, extensionRegistry);
13354    }
13355    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection parseFrom(java.io.InputStream input)
13356        throws java.io.IOException {
13357      return PARSER.parseFrom(input);
13358    }
13359    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection parseFrom(
13360        java.io.InputStream input,
13361        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
13362        throws java.io.IOException {
13363      return PARSER.parseFrom(input, extensionRegistry);
13364    }
13365    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection parseDelimitedFrom(java.io.InputStream input)
13366        throws java.io.IOException {
13367      return PARSER.parseDelimitedFrom(input);
13368    }
13369    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection parseDelimitedFrom(
13370        java.io.InputStream input,
13371        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
13372        throws java.io.IOException {
13373      return PARSER.parseDelimitedFrom(input, extensionRegistry);
13374    }
13375    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection parseFrom(
13376        com.google.protobuf.CodedInputStream input)
13377        throws java.io.IOException {
13378      return PARSER.parseFrom(input);
13379    }
13380    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection parseFrom(
13381        com.google.protobuf.CodedInputStream input,
13382        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
13383        throws java.io.IOException {
13384      return PARSER.parseFrom(input, extensionRegistry);
13385    }
13386
13387    public static Builder newBuilder() { return Builder.create(); }
13388    public Builder newBuilderForType() { return newBuilder(); }
13389    public static Builder newBuilder(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection prototype) {
13390      return newBuilder().mergeFrom(prototype);
13391    }
13392    public Builder toBuilder() { return newBuilder(this); }
13393
13394    @java.lang.Override
13395    protected Builder newBuilderForType(
13396        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
13397      Builder builder = new Builder(parent);
13398      return builder;
13399    }
13400    /**
13401     * Protobuf type {@code hadoop.hdfs.fsimage.INodeReferenceSection}
13402     */
13403    public static final class Builder extends
13404        com.google.protobuf.GeneratedMessage.Builder<Builder>
13405       implements org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSectionOrBuilder {
13406      public static final com.google.protobuf.Descriptors.Descriptor
13407          getDescriptor() {
13408        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeReferenceSection_descriptor;
13409      }
13410
13411      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
13412          internalGetFieldAccessorTable() {
13413        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeReferenceSection_fieldAccessorTable
13414            .ensureFieldAccessorsInitialized(
13415                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.Builder.class);
13416      }
13417
13418      // Construct using org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.newBuilder()
13419      private Builder() {
13420        maybeForceBuilderInitialization();
13421      }
13422
13423      private Builder(
13424          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
13425        super(parent);
13426        maybeForceBuilderInitialization();
13427      }
13428      private void maybeForceBuilderInitialization() {
13429        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
13430        }
13431      }
13432      private static Builder create() {
13433        return new Builder();
13434      }
13435
13436      public Builder clear() {
13437        super.clear();
13438        return this;
13439      }
13440
13441      public Builder clone() {
13442        return create().mergeFrom(buildPartial());
13443      }
13444
13445      public com.google.protobuf.Descriptors.Descriptor
13446          getDescriptorForType() {
13447        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_INodeReferenceSection_descriptor;
13448      }
13449
13450      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection getDefaultInstanceForType() {
13451        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.getDefaultInstance();
13452      }
13453
13454      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection build() {
13455        org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection result = buildPartial();
13456        if (!result.isInitialized()) {
13457          throw newUninitializedMessageException(result);
13458        }
13459        return result;
13460      }
13461
13462      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection buildPartial() {
13463        org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection result = new org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection(this);
13464        onBuilt();
13465        return result;
13466      }
13467
13468      public Builder mergeFrom(com.google.protobuf.Message other) {
13469        if (other instanceof org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection) {
13470          return mergeFrom((org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection)other);
13471        } else {
13472          super.mergeFrom(other);
13473          return this;
13474        }
13475      }
13476
13477      public Builder mergeFrom(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection other) {
13478        if (other == org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection.getDefaultInstance()) return this;
13479        this.mergeUnknownFields(other.getUnknownFields());
13480        return this;
13481      }
13482
13483      public final boolean isInitialized() {
13484        return true;
13485      }
13486
13487      public Builder mergeFrom(
13488          com.google.protobuf.CodedInputStream input,
13489          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
13490          throws java.io.IOException {
13491        org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection parsedMessage = null;
13492        try {
13493          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
13494        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
13495          parsedMessage = (org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection) e.getUnfinishedMessage();
13496          throw e;
13497        } finally {
13498          if (parsedMessage != null) {
13499            mergeFrom(parsedMessage);
13500          }
13501        }
13502        return this;
13503      }
13504
13505      // @@protoc_insertion_point(builder_scope:hadoop.hdfs.fsimage.INodeReferenceSection)
13506    }
13507
13508    static {
13509      defaultInstance = new INodeReferenceSection(true);
13510      defaultInstance.initFields();
13511    }
13512
13513    // @@protoc_insertion_point(class_scope:hadoop.hdfs.fsimage.INodeReferenceSection)
13514  }
13515
13516  public interface SnapshotSectionOrBuilder
13517      extends com.google.protobuf.MessageOrBuilder {
13518
13519    // optional uint32 snapshotCounter = 1;
13520    /**
13521     * <code>optional uint32 snapshotCounter = 1;</code>
13522     */
13523    boolean hasSnapshotCounter();
13524    /**
13525     * <code>optional uint32 snapshotCounter = 1;</code>
13526     */
13527    int getSnapshotCounter();
13528
13529    // repeated uint64 snapshottableDir = 2 [packed = true];
13530    /**
13531     * <code>repeated uint64 snapshottableDir = 2 [packed = true];</code>
13532     */
13533    java.util.List<java.lang.Long> getSnapshottableDirList();
13534    /**
13535     * <code>repeated uint64 snapshottableDir = 2 [packed = true];</code>
13536     */
13537    int getSnapshottableDirCount();
13538    /**
13539     * <code>repeated uint64 snapshottableDir = 2 [packed = true];</code>
13540     */
13541    long getSnapshottableDir(int index);
13542
13543    // optional uint32 numSnapshots = 3;
13544    /**
13545     * <code>optional uint32 numSnapshots = 3;</code>
13546     *
13547     * <pre>
13548     * total number of snapshots
13549     * </pre>
13550     */
13551    boolean hasNumSnapshots();
13552    /**
13553     * <code>optional uint32 numSnapshots = 3;</code>
13554     *
13555     * <pre>
13556     * total number of snapshots
13557     * </pre>
13558     */
13559    int getNumSnapshots();
13560  }
13561  /**
13562   * Protobuf type {@code hadoop.hdfs.fsimage.SnapshotSection}
13563   *
13564   * <pre>
13565   **
13566   * This section records the information about snapshot
13567   * NAME: SNAPSHOT
13568   * </pre>
13569   */
13570  public static final class SnapshotSection extends
13571      com.google.protobuf.GeneratedMessage
13572      implements SnapshotSectionOrBuilder {
13573    // Use SnapshotSection.newBuilder() to construct.
13574    private SnapshotSection(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
13575      super(builder);
13576      this.unknownFields = builder.getUnknownFields();
13577    }
13578    private SnapshotSection(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
13579
13580    private static final SnapshotSection defaultInstance;
13581    public static SnapshotSection getDefaultInstance() {
13582      return defaultInstance;
13583    }
13584
13585    public SnapshotSection getDefaultInstanceForType() {
13586      return defaultInstance;
13587    }
13588
13589    private final com.google.protobuf.UnknownFieldSet unknownFields;
13590    @java.lang.Override
13591    public final com.google.protobuf.UnknownFieldSet
13592        getUnknownFields() {
13593      return this.unknownFields;
13594    }
13595    private SnapshotSection(
13596        com.google.protobuf.CodedInputStream input,
13597        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
13598        throws com.google.protobuf.InvalidProtocolBufferException {
13599      initFields();
13600      int mutable_bitField0_ = 0;
13601      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
13602          com.google.protobuf.UnknownFieldSet.newBuilder();
13603      try {
13604        boolean done = false;
13605        while (!done) {
13606          int tag = input.readTag();
13607          switch (tag) {
13608            case 0:
13609              done = true;
13610              break;
13611            default: {
13612              if (!parseUnknownField(input, unknownFields,
13613                                     extensionRegistry, tag)) {
13614                done = true;
13615              }
13616              break;
13617            }
13618            case 8: {
13619              bitField0_ |= 0x00000001;
13620              snapshotCounter_ = input.readUInt32();
13621              break;
13622            }
13623            case 16: {
13624              if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
13625                snapshottableDir_ = new java.util.ArrayList<java.lang.Long>();
13626                mutable_bitField0_ |= 0x00000002;
13627              }
13628              snapshottableDir_.add(input.readUInt64());
13629              break;
13630            }
13631            case 18: {
13632              int length = input.readRawVarint32();
13633              int limit = input.pushLimit(length);
13634              if (!((mutable_bitField0_ & 0x00000002) == 0x00000002) && input.getBytesUntilLimit() > 0) {
13635                snapshottableDir_ = new java.util.ArrayList<java.lang.Long>();
13636                mutable_bitField0_ |= 0x00000002;
13637              }
13638              while (input.getBytesUntilLimit() > 0) {
13639                snapshottableDir_.add(input.readUInt64());
13640              }
13641              input.popLimit(limit);
13642              break;
13643            }
13644            case 24: {
13645              bitField0_ |= 0x00000002;
13646              numSnapshots_ = input.readUInt32();
13647              break;
13648            }
13649          }
13650        }
13651      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
13652        throw e.setUnfinishedMessage(this);
13653      } catch (java.io.IOException e) {
13654        throw new com.google.protobuf.InvalidProtocolBufferException(
13655            e.getMessage()).setUnfinishedMessage(this);
13656      } finally {
13657        if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
13658          snapshottableDir_ = java.util.Collections.unmodifiableList(snapshottableDir_);
13659        }
13660        this.unknownFields = unknownFields.build();
13661        makeExtensionsImmutable();
13662      }
13663    }
13664    public static final com.google.protobuf.Descriptors.Descriptor
13665        getDescriptor() {
13666      return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SnapshotSection_descriptor;
13667    }
13668
13669    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
13670        internalGetFieldAccessorTable() {
13671      return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SnapshotSection_fieldAccessorTable
13672          .ensureFieldAccessorsInitialized(
13673              org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.Builder.class);
13674    }
13675
13676    public static com.google.protobuf.Parser<SnapshotSection> PARSER =
13677        new com.google.protobuf.AbstractParser<SnapshotSection>() {
13678      public SnapshotSection parsePartialFrom(
13679          com.google.protobuf.CodedInputStream input,
13680          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
13681          throws com.google.protobuf.InvalidProtocolBufferException {
13682        return new SnapshotSection(input, extensionRegistry);
13683      }
13684    };
13685
13686    @java.lang.Override
13687    public com.google.protobuf.Parser<SnapshotSection> getParserForType() {
13688      return PARSER;
13689    }
13690
13691    public interface SnapshotOrBuilder
13692        extends com.google.protobuf.MessageOrBuilder {
13693
13694      // optional uint32 snapshotId = 1;
13695      /**
13696       * <code>optional uint32 snapshotId = 1;</code>
13697       */
13698      boolean hasSnapshotId();
13699      /**
13700       * <code>optional uint32 snapshotId = 1;</code>
13701       */
13702      int getSnapshotId();
13703
13704      // optional .hadoop.hdfs.fsimage.INodeSection.INode root = 2;
13705      /**
13706       * <code>optional .hadoop.hdfs.fsimage.INodeSection.INode root = 2;</code>
13707       *
13708       * <pre>
13709       * Snapshot root
13710       * </pre>
13711       */
13712      boolean hasRoot();
13713      /**
13714       * <code>optional .hadoop.hdfs.fsimage.INodeSection.INode root = 2;</code>
13715       *
13716       * <pre>
13717       * Snapshot root
13718       * </pre>
13719       */
13720      org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode getRoot();
13721      /**
13722       * <code>optional .hadoop.hdfs.fsimage.INodeSection.INode root = 2;</code>
13723       *
13724       * <pre>
13725       * Snapshot root
13726       * </pre>
13727       */
13728      org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeOrBuilder getRootOrBuilder();
13729    }
13730    /**
13731     * Protobuf type {@code hadoop.hdfs.fsimage.SnapshotSection.Snapshot}
13732     */
13733    public static final class Snapshot extends
13734        com.google.protobuf.GeneratedMessage
13735        implements SnapshotOrBuilder {
13736      // Use Snapshot.newBuilder() to construct.
13737      private Snapshot(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
13738        super(builder);
13739        this.unknownFields = builder.getUnknownFields();
13740      }
13741      private Snapshot(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
13742
13743      private static final Snapshot defaultInstance;
13744      public static Snapshot getDefaultInstance() {
13745        return defaultInstance;
13746      }
13747
13748      public Snapshot getDefaultInstanceForType() {
13749        return defaultInstance;
13750      }
13751
13752      private final com.google.protobuf.UnknownFieldSet unknownFields;
13753      @java.lang.Override
13754      public final com.google.protobuf.UnknownFieldSet
13755          getUnknownFields() {
13756        return this.unknownFields;
13757      }
13758      private Snapshot(
13759          com.google.protobuf.CodedInputStream input,
13760          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
13761          throws com.google.protobuf.InvalidProtocolBufferException {
13762        initFields();
13763        int mutable_bitField0_ = 0;
13764        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
13765            com.google.protobuf.UnknownFieldSet.newBuilder();
13766        try {
13767          boolean done = false;
13768          while (!done) {
13769            int tag = input.readTag();
13770            switch (tag) {
13771              case 0:
13772                done = true;
13773                break;
13774              default: {
13775                if (!parseUnknownField(input, unknownFields,
13776                                       extensionRegistry, tag)) {
13777                  done = true;
13778                }
13779                break;
13780              }
13781              case 8: {
13782                bitField0_ |= 0x00000001;
13783                snapshotId_ = input.readUInt32();
13784                break;
13785              }
13786              case 18: {
13787                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode.Builder subBuilder = null;
13788                if (((bitField0_ & 0x00000002) == 0x00000002)) {
13789                  subBuilder = root_.toBuilder();
13790                }
13791                root_ = input.readMessage(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode.PARSER, extensionRegistry);
13792                if (subBuilder != null) {
13793                  subBuilder.mergeFrom(root_);
13794                  root_ = subBuilder.buildPartial();
13795                }
13796                bitField0_ |= 0x00000002;
13797                break;
13798              }
13799            }
13800          }
13801        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
13802          throw e.setUnfinishedMessage(this);
13803        } catch (java.io.IOException e) {
13804          throw new com.google.protobuf.InvalidProtocolBufferException(
13805              e.getMessage()).setUnfinishedMessage(this);
13806        } finally {
13807          this.unknownFields = unknownFields.build();
13808          makeExtensionsImmutable();
13809        }
13810      }
13811      public static final com.google.protobuf.Descriptors.Descriptor
13812          getDescriptor() {
13813        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SnapshotSection_Snapshot_descriptor;
13814      }
13815
13816      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
13817          internalGetFieldAccessorTable() {
13818        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SnapshotSection_Snapshot_fieldAccessorTable
13819            .ensureFieldAccessorsInitialized(
13820                org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.Snapshot.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.Snapshot.Builder.class);
13821      }
13822
13823      public static com.google.protobuf.Parser<Snapshot> PARSER =
13824          new com.google.protobuf.AbstractParser<Snapshot>() {
13825        public Snapshot parsePartialFrom(
13826            com.google.protobuf.CodedInputStream input,
13827            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
13828            throws com.google.protobuf.InvalidProtocolBufferException {
13829          return new Snapshot(input, extensionRegistry);
13830        }
13831      };
13832
13833      @java.lang.Override
13834      public com.google.protobuf.Parser<Snapshot> getParserForType() {
13835        return PARSER;
13836      }
13837
13838      private int bitField0_;
13839      // optional uint32 snapshotId = 1;
13840      public static final int SNAPSHOTID_FIELD_NUMBER = 1;
13841      private int snapshotId_;
13842      /**
13843       * <code>optional uint32 snapshotId = 1;</code>
13844       */
13845      public boolean hasSnapshotId() {
13846        return ((bitField0_ & 0x00000001) == 0x00000001);
13847      }
13848      /**
13849       * <code>optional uint32 snapshotId = 1;</code>
13850       */
13851      public int getSnapshotId() {
13852        return snapshotId_;
13853      }
13854
13855      // optional .hadoop.hdfs.fsimage.INodeSection.INode root = 2;
13856      public static final int ROOT_FIELD_NUMBER = 2;
13857      private org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode root_;
13858      /**
13859       * <code>optional .hadoop.hdfs.fsimage.INodeSection.INode root = 2;</code>
13860       *
13861       * <pre>
13862       * Snapshot root
13863       * </pre>
13864       */
13865      public boolean hasRoot() {
13866        return ((bitField0_ & 0x00000002) == 0x00000002);
13867      }
13868      /**
13869       * <code>optional .hadoop.hdfs.fsimage.INodeSection.INode root = 2;</code>
13870       *
13871       * <pre>
13872       * Snapshot root
13873       * </pre>
13874       */
13875      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode getRoot() {
13876        return root_;
13877      }
13878      /**
13879       * <code>optional .hadoop.hdfs.fsimage.INodeSection.INode root = 2;</code>
13880       *
13881       * <pre>
13882       * Snapshot root
13883       * </pre>
13884       */
13885      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeOrBuilder getRootOrBuilder() {
13886        return root_;
13887      }
13888
13889      private void initFields() {
13890        snapshotId_ = 0;
13891        root_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode.getDefaultInstance();
13892      }
13893      private byte memoizedIsInitialized = -1;
13894      public final boolean isInitialized() {
13895        byte isInitialized = memoizedIsInitialized;
13896        if (isInitialized != -1) return isInitialized == 1;
13897
13898        if (hasRoot()) {
13899          if (!getRoot().isInitialized()) {
13900            memoizedIsInitialized = 0;
13901            return false;
13902          }
13903        }
13904        memoizedIsInitialized = 1;
13905        return true;
13906      }
13907
13908      public void writeTo(com.google.protobuf.CodedOutputStream output)
13909                          throws java.io.IOException {
13910        getSerializedSize();
13911        if (((bitField0_ & 0x00000001) == 0x00000001)) {
13912          output.writeUInt32(1, snapshotId_);
13913        }
13914        if (((bitField0_ & 0x00000002) == 0x00000002)) {
13915          output.writeMessage(2, root_);
13916        }
13917        getUnknownFields().writeTo(output);
13918      }
13919
13920      private int memoizedSerializedSize = -1;
13921      public int getSerializedSize() {
13922        int size = memoizedSerializedSize;
13923        if (size != -1) return size;
13924
13925        size = 0;
13926        if (((bitField0_ & 0x00000001) == 0x00000001)) {
13927          size += com.google.protobuf.CodedOutputStream
13928            .computeUInt32Size(1, snapshotId_);
13929        }
13930        if (((bitField0_ & 0x00000002) == 0x00000002)) {
13931          size += com.google.protobuf.CodedOutputStream
13932            .computeMessageSize(2, root_);
13933        }
13934        size += getUnknownFields().getSerializedSize();
13935        memoizedSerializedSize = size;
13936        return size;
13937      }
13938
13939      private static final long serialVersionUID = 0L;
13940      @java.lang.Override
13941      protected java.lang.Object writeReplace()
13942          throws java.io.ObjectStreamException {
13943        return super.writeReplace();
13944      }
13945
13946      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.Snapshot parseFrom(
13947          com.google.protobuf.ByteString data)
13948          throws com.google.protobuf.InvalidProtocolBufferException {
13949        return PARSER.parseFrom(data);
13950      }
13951      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.Snapshot parseFrom(
13952          com.google.protobuf.ByteString data,
13953          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
13954          throws com.google.protobuf.InvalidProtocolBufferException {
13955        return PARSER.parseFrom(data, extensionRegistry);
13956      }
13957      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.Snapshot parseFrom(byte[] data)
13958          throws com.google.protobuf.InvalidProtocolBufferException {
13959        return PARSER.parseFrom(data);
13960      }
13961      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.Snapshot parseFrom(
13962          byte[] data,
13963          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
13964          throws com.google.protobuf.InvalidProtocolBufferException {
13965        return PARSER.parseFrom(data, extensionRegistry);
13966      }
13967      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.Snapshot parseFrom(java.io.InputStream input)
13968          throws java.io.IOException {
13969        return PARSER.parseFrom(input);
13970      }
13971      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.Snapshot parseFrom(
13972          java.io.InputStream input,
13973          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
13974          throws java.io.IOException {
13975        return PARSER.parseFrom(input, extensionRegistry);
13976      }
13977      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.Snapshot parseDelimitedFrom(java.io.InputStream input)
13978          throws java.io.IOException {
13979        return PARSER.parseDelimitedFrom(input);
13980      }
13981      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.Snapshot parseDelimitedFrom(
13982          java.io.InputStream input,
13983          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
13984          throws java.io.IOException {
13985        return PARSER.parseDelimitedFrom(input, extensionRegistry);
13986      }
13987      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.Snapshot parseFrom(
13988          com.google.protobuf.CodedInputStream input)
13989          throws java.io.IOException {
13990        return PARSER.parseFrom(input);
13991      }
13992      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.Snapshot parseFrom(
13993          com.google.protobuf.CodedInputStream input,
13994          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
13995          throws java.io.IOException {
13996        return PARSER.parseFrom(input, extensionRegistry);
13997      }
13998
13999      public static Builder newBuilder() { return Builder.create(); }
14000      public Builder newBuilderForType() { return newBuilder(); }
14001      public static Builder newBuilder(org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.Snapshot prototype) {
14002        return newBuilder().mergeFrom(prototype);
14003      }
14004      public Builder toBuilder() { return newBuilder(this); }
14005
14006      @java.lang.Override
14007      protected Builder newBuilderForType(
14008          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
14009        Builder builder = new Builder(parent);
14010        return builder;
14011      }
14012      /**
14013       * Protobuf type {@code hadoop.hdfs.fsimage.SnapshotSection.Snapshot}
14014       */
14015      public static final class Builder extends
14016          com.google.protobuf.GeneratedMessage.Builder<Builder>
14017         implements org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.SnapshotOrBuilder {
14018        public static final com.google.protobuf.Descriptors.Descriptor
14019            getDescriptor() {
14020          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SnapshotSection_Snapshot_descriptor;
14021        }
14022
14023        protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
14024            internalGetFieldAccessorTable() {
14025          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SnapshotSection_Snapshot_fieldAccessorTable
14026              .ensureFieldAccessorsInitialized(
14027                  org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.Snapshot.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.Snapshot.Builder.class);
14028        }
14029
14030        // Construct using org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.Snapshot.newBuilder()
14031        private Builder() {
14032          maybeForceBuilderInitialization();
14033        }
14034
14035        private Builder(
14036            com.google.protobuf.GeneratedMessage.BuilderParent parent) {
14037          super(parent);
14038          maybeForceBuilderInitialization();
14039        }
14040        private void maybeForceBuilderInitialization() {
14041          if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
14042            getRootFieldBuilder();
14043          }
14044        }
14045        private static Builder create() {
14046          return new Builder();
14047        }
14048
14049        public Builder clear() {
14050          super.clear();
14051          snapshotId_ = 0;
14052          bitField0_ = (bitField0_ & ~0x00000001);
14053          if (rootBuilder_ == null) {
14054            root_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode.getDefaultInstance();
14055          } else {
14056            rootBuilder_.clear();
14057          }
14058          bitField0_ = (bitField0_ & ~0x00000002);
14059          return this;
14060        }
14061
14062        public Builder clone() {
14063          return create().mergeFrom(buildPartial());
14064        }
14065
14066        public com.google.protobuf.Descriptors.Descriptor
14067            getDescriptorForType() {
14068          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SnapshotSection_Snapshot_descriptor;
14069        }
14070
14071        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.Snapshot getDefaultInstanceForType() {
14072          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.Snapshot.getDefaultInstance();
14073        }
14074
14075        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.Snapshot build() {
14076          org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.Snapshot result = buildPartial();
14077          if (!result.isInitialized()) {
14078            throw newUninitializedMessageException(result);
14079          }
14080          return result;
14081        }
14082
14083        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.Snapshot buildPartial() {
14084          org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.Snapshot result = new org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.Snapshot(this);
14085          int from_bitField0_ = bitField0_;
14086          int to_bitField0_ = 0;
14087          if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
14088            to_bitField0_ |= 0x00000001;
14089          }
14090          result.snapshotId_ = snapshotId_;
14091          if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
14092            to_bitField0_ |= 0x00000002;
14093          }
14094          if (rootBuilder_ == null) {
14095            result.root_ = root_;
14096          } else {
14097            result.root_ = rootBuilder_.build();
14098          }
14099          result.bitField0_ = to_bitField0_;
14100          onBuilt();
14101          return result;
14102        }
14103
14104        public Builder mergeFrom(com.google.protobuf.Message other) {
14105          if (other instanceof org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.Snapshot) {
14106            return mergeFrom((org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.Snapshot)other);
14107          } else {
14108            super.mergeFrom(other);
14109            return this;
14110          }
14111        }
14112
14113        public Builder mergeFrom(org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.Snapshot other) {
14114          if (other == org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.Snapshot.getDefaultInstance()) return this;
14115          if (other.hasSnapshotId()) {
14116            setSnapshotId(other.getSnapshotId());
14117          }
14118          if (other.hasRoot()) {
14119            mergeRoot(other.getRoot());
14120          }
14121          this.mergeUnknownFields(other.getUnknownFields());
14122          return this;
14123        }
14124
14125        public final boolean isInitialized() {
14126          if (hasRoot()) {
            if (!getRoot().isInitialized()) {
              return false;
14130            }
14131          }
14132          return true;
14133        }
14134
14135        public Builder mergeFrom(
14136            com.google.protobuf.CodedInputStream input,
14137            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
14138            throws java.io.IOException {
14139          org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.Snapshot parsedMessage = null;
14140          try {
14141            parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
14142          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
14143            parsedMessage = (org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.Snapshot) e.getUnfinishedMessage();
14144            throw e;
14145          } finally {
14146            if (parsedMessage != null) {
14147              mergeFrom(parsedMessage);
14148            }
14149          }
14150          return this;
14151        }
14152        private int bitField0_;
14153
14154        // optional uint32 snapshotId = 1;
14155        private int snapshotId_ ;
14156        /**
14157         * <code>optional uint32 snapshotId = 1;</code>
14158         */
14159        public boolean hasSnapshotId() {
14160          return ((bitField0_ & 0x00000001) == 0x00000001);
14161        }
14162        /**
14163         * <code>optional uint32 snapshotId = 1;</code>
14164         */
14165        public int getSnapshotId() {
14166          return snapshotId_;
14167        }
14168        /**
14169         * <code>optional uint32 snapshotId = 1;</code>
14170         */
14171        public Builder setSnapshotId(int value) {
14172          bitField0_ |= 0x00000001;
14173          snapshotId_ = value;
14174          onChanged();
14175          return this;
14176        }
14177        /**
14178         * <code>optional uint32 snapshotId = 1;</code>
14179         */
14180        public Builder clearSnapshotId() {
14181          bitField0_ = (bitField0_ & ~0x00000001);
14182          snapshotId_ = 0;
14183          onChanged();
14184          return this;
14185        }
14186
14187        // optional .hadoop.hdfs.fsimage.INodeSection.INode root = 2;
14188        private org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode root_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode.getDefaultInstance();
14189        private com.google.protobuf.SingleFieldBuilder<
14190            org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeOrBuilder> rootBuilder_;
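        // rootBuilder_ is created lazily by getRootFieldBuilder(); until a
        // nested builder is requested, the plain root_ message above is used
        // and this reference stays null (hence the null checks below).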
14191        /**
14192         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INode root = 2;</code>
14193         *
14194         * <pre>
14195         * Snapshot root
14196         * </pre>
14197         */
14198        public boolean hasRoot() {
14199          return ((bitField0_ & 0x00000002) == 0x00000002);
14200        }
14201        /**
14202         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INode root = 2;</code>
14203         *
14204         * <pre>
14205         * Snapshot root
14206         * </pre>
14207         */
14208        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode getRoot() {
14209          if (rootBuilder_ == null) {
14210            return root_;
14211          } else {
14212            return rootBuilder_.getMessage();
14213          }
14214        }
14215        /**
14216         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INode root = 2;</code>
14217         *
14218         * <pre>
14219         * Snapshot root
14220         * </pre>
14221         */
14222        public Builder setRoot(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode value) {
14223          if (rootBuilder_ == null) {
14224            if (value == null) {
14225              throw new NullPointerException();
14226            }
14227            root_ = value;
14228            onChanged();
14229          } else {
14230            rootBuilder_.setMessage(value);
14231          }
14232          bitField0_ |= 0x00000002;
14233          return this;
14234        }
14235        /**
14236         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INode root = 2;</code>
14237         *
14238         * <pre>
14239         * Snapshot root
14240         * </pre>
14241         */
14242        public Builder setRoot(
14243            org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode.Builder builderForValue) {
14244          if (rootBuilder_ == null) {
14245            root_ = builderForValue.build();
14246            onChanged();
14247          } else {
14248            rootBuilder_.setMessage(builderForValue.build());
14249          }
14250          bitField0_ |= 0x00000002;
14251          return this;
14252        }
14253        /**
14254         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INode root = 2;</code>
14255         *
14256         * <pre>
14257         * Snapshot root
14258         * </pre>
14259         */
14260        public Builder mergeRoot(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode value) {
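          // Merge semantics: if a root message is already present, value's
          // set fields are folded into it via a nested Builder; otherwise
          // value simply replaces the default instance.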
14261          if (rootBuilder_ == null) {
14262            if (((bitField0_ & 0x00000002) == 0x00000002) &&
14263                root_ != org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode.getDefaultInstance()) {
14264              root_ =
14265                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode.newBuilder(root_).mergeFrom(value).buildPartial();
14266            } else {
14267              root_ = value;
14268            }
14269            onChanged();
14270          } else {
14271            rootBuilder_.mergeFrom(value);
14272          }
14273          bitField0_ |= 0x00000002;
14274          return this;
14275        }
14276        /**
14277         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INode root = 2;</code>
14278         *
14279         * <pre>
14280         * Snapshot root
14281         * </pre>
14282         */
14283        public Builder clearRoot() {
14284          if (rootBuilder_ == null) {
14285            root_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode.getDefaultInstance();
14286            onChanged();
14287          } else {
14288            rootBuilder_.clear();
14289          }
14290          bitField0_ = (bitField0_ & ~0x00000002);
14291          return this;
14292        }
14293        /**
14294         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INode root = 2;</code>
14295         *
14296         * <pre>
14297         * Snapshot root
14298         * </pre>
14299         */
14300        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode.Builder getRootBuilder() {
14301          bitField0_ |= 0x00000002;
14302          onChanged();
14303          return getRootFieldBuilder().getBuilder();
14304        }
14305        /**
14306         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INode root = 2;</code>
14307         *
14308         * <pre>
14309         * Snapshot root
14310         * </pre>
14311         */
14312        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeOrBuilder getRootOrBuilder() {
14313          if (rootBuilder_ != null) {
14314            return rootBuilder_.getMessageOrBuilder();
14315          } else {
14316            return root_;
14317          }
14318        }
14319        /**
14320         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INode root = 2;</code>
14321         *
14322         * <pre>
14323         * Snapshot root
14324         * </pre>
14325         */
14326        private com.google.protobuf.SingleFieldBuilder<
14327            org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeOrBuilder> 
14328            getRootFieldBuilder() {
14329          if (rootBuilder_ == null) {
14330            rootBuilder_ = new com.google.protobuf.SingleFieldBuilder<
14331                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeOrBuilder>(
14332                    root_,
14333                    getParentForChildren(),
14334                    isClean());
14335            root_ = null;
14336          }
14337          return rootBuilder_;
14338        }
14339
14340        // @@protoc_insertion_point(builder_scope:hadoop.hdfs.fsimage.SnapshotSection.Snapshot)
14341      }
14342
14343      static {
14344        defaultInstance = new Snapshot(true);
14345        defaultInstance.initFields();
14346      }
14347
14348      // @@protoc_insertion_point(class_scope:hadoop.hdfs.fsimage.SnapshotSection.Snapshot)
14349    }
14350
14351    private int bitField0_;
14352    // optional uint32 snapshotCounter = 1;
14353    public static final int SNAPSHOTCOUNTER_FIELD_NUMBER = 1;
14354    private int snapshotCounter_;
14355    /**
14356     * <code>optional uint32 snapshotCounter = 1;</code>
14357     */
14358    public boolean hasSnapshotCounter() {
14359      return ((bitField0_ & 0x00000001) == 0x00000001);
14360    }
14361    /**
14362     * <code>optional uint32 snapshotCounter = 1;</code>
14363     */
14364    public int getSnapshotCounter() {
14365      return snapshotCounter_;
14366    }
14367
14368    // repeated uint64 snapshottableDir = 2 [packed = true];
14369    public static final int SNAPSHOTTABLEDIR_FIELD_NUMBER = 2;
14370    private java.util.List<java.lang.Long> snapshottableDir_;
14371    /**
14372     * <code>repeated uint64 snapshottableDir = 2 [packed = true];</code>
14373     */
14374    public java.util.List<java.lang.Long>
14375        getSnapshottableDirList() {
14376      return snapshottableDir_;
14377    }
14378    /**
14379     * <code>repeated uint64 snapshottableDir = 2 [packed = true];</code>
14380     */
14381    public int getSnapshottableDirCount() {
14382      return snapshottableDir_.size();
14383    }
14384    /**
14385     * <code>repeated uint64 snapshottableDir = 2 [packed = true];</code>
14386     */
14387    public long getSnapshottableDir(int index) {
14388      return snapshottableDir_.get(index);
14389    }
14390    private int snapshottableDirMemoizedSerializedSize = -1;
14391
14392    // optional uint32 numSnapshots = 3;
14393    public static final int NUMSNAPSHOTS_FIELD_NUMBER = 3;
14394    private int numSnapshots_;
14395    /**
14396     * <code>optional uint32 numSnapshots = 3;</code>
14397     *
14398     * <pre>
14399     * total number of snapshots
14400     * </pre>
14401     */
14402    public boolean hasNumSnapshots() {
14403      return ((bitField0_ & 0x00000002) == 0x00000002);
14404    }
14405    /**
14406     * <code>optional uint32 numSnapshots = 3;</code>
14407     *
14408     * <pre>
14409     * total number of snapshots
14410     * </pre>
14411     */
14412    public int getNumSnapshots() {
14413      return numSnapshots_;
14414    }
14415
14416    private void initFields() {
14417      snapshotCounter_ = 0;
14418      snapshottableDir_ = java.util.Collections.emptyList();
14419      numSnapshots_ = 0;
14420    }
14421    private byte memoizedIsInitialized = -1;
14422    public final boolean isInitialized() {
14423      byte isInitialized = memoizedIsInitialized;
14424      if (isInitialized != -1) return isInitialized == 1;
14425
14426      memoizedIsInitialized = 1;
14427      return true;
14428    }
14429
14430    public void writeTo(com.google.protobuf.CodedOutputStream output)
14431                        throws java.io.IOException {
14432      getSerializedSize();
14433      if (((bitField0_ & 0x00000001) == 0x00000001)) {
14434        output.writeUInt32(1, snapshotCounter_);
14435      }
14436      if (getSnapshottableDirList().size() > 0) {
14437        output.writeRawVarint32(18);
14438        output.writeRawVarint32(snapshottableDirMemoizedSerializedSize);
14439      }
14440      for (int i = 0; i < snapshottableDir_.size(); i++) {
14441        output.writeUInt64NoTag(snapshottableDir_.get(i));
14442      }
14443      if (((bitField0_ & 0x00000002) == 0x00000002)) {
14444        output.writeUInt32(3, numSnapshots_);
14445      }
14446      getUnknownFields().writeTo(output);
14447    }
14448
14449    private int memoizedSerializedSize = -1;
14450    public int getSerializedSize() {
14451      int size = memoizedSerializedSize;
14452      if (size != -1) return size;
14453
14454      size = 0;
14455      if (((bitField0_ & 0x00000001) == 0x00000001)) {
14456        size += com.google.protobuf.CodedOutputStream
14457          .computeUInt32Size(1, snapshotCounter_);
14458      }
14459      {
14460        int dataSize = 0;
14461        for (int i = 0; i < snapshottableDir_.size(); i++) {
14462          dataSize += com.google.protobuf.CodedOutputStream
14463            .computeUInt64SizeNoTag(snapshottableDir_.get(i));
14464        }
14465        size += dataSize;
14466        if (!getSnapshottableDirList().isEmpty()) {
14467          size += 1;
14468          size += com.google.protobuf.CodedOutputStream
14469              .computeInt32SizeNoTag(dataSize);
14470        }
14471        snapshottableDirMemoizedSerializedSize = dataSize;
14472      }
14473      if (((bitField0_ & 0x00000002) == 0x00000002)) {
14474        size += com.google.protobuf.CodedOutputStream
14475          .computeUInt32Size(3, numSnapshots_);
14476      }
14477      size += getUnknownFields().getSerializedSize();
14478      memoizedSerializedSize = size;
14479      return size;
14480    }
14481
14482    private static final long serialVersionUID = 0L;
14483    @java.lang.Override
14484    protected java.lang.Object writeReplace()
14485        throws java.io.ObjectStreamException {
14486      return super.writeReplace();
14487    }
14488
14489    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection parseFrom(
14490        com.google.protobuf.ByteString data)
14491        throws com.google.protobuf.InvalidProtocolBufferException {
14492      return PARSER.parseFrom(data);
14493    }
14494    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection parseFrom(
14495        com.google.protobuf.ByteString data,
14496        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
14497        throws com.google.protobuf.InvalidProtocolBufferException {
14498      return PARSER.parseFrom(data, extensionRegistry);
14499    }
14500    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection parseFrom(byte[] data)
14501        throws com.google.protobuf.InvalidProtocolBufferException {
14502      return PARSER.parseFrom(data);
14503    }
14504    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection parseFrom(
14505        byte[] data,
14506        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
14507        throws com.google.protobuf.InvalidProtocolBufferException {
14508      return PARSER.parseFrom(data, extensionRegistry);
14509    }
14510    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection parseFrom(java.io.InputStream input)
14511        throws java.io.IOException {
14512      return PARSER.parseFrom(input);
14513    }
14514    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection parseFrom(
14515        java.io.InputStream input,
14516        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
14517        throws java.io.IOException {
14518      return PARSER.parseFrom(input, extensionRegistry);
14519    }
14520    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection parseDelimitedFrom(java.io.InputStream input)
14521        throws java.io.IOException {
14522      return PARSER.parseDelimitedFrom(input);
14523    }
14524    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection parseDelimitedFrom(
14525        java.io.InputStream input,
14526        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
14527        throws java.io.IOException {
14528      return PARSER.parseDelimitedFrom(input, extensionRegistry);
14529    }
14530    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection parseFrom(
14531        com.google.protobuf.CodedInputStream input)
14532        throws java.io.IOException {
14533      return PARSER.parseFrom(input);
14534    }
14535    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection parseFrom(
14536        com.google.protobuf.CodedInputStream input,
14537        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
14538        throws java.io.IOException {
14539      return PARSER.parseFrom(input, extensionRegistry);
14540    }
14541
14542    public static Builder newBuilder() { return Builder.create(); }
14543    public Builder newBuilderForType() { return newBuilder(); }
14544    public static Builder newBuilder(org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection prototype) {
14545      return newBuilder().mergeFrom(prototype);
14546    }
14547    public Builder toBuilder() { return newBuilder(this); }
14548
14549    @java.lang.Override
14550    protected Builder newBuilderForType(
14551        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
14552      Builder builder = new Builder(parent);
14553      return builder;
14554    }
14555    /**
14556     * Protobuf type {@code hadoop.hdfs.fsimage.SnapshotSection}
14557     *
14558     * <pre>
14559     **
14560     * This section records the information about snapshot
14561     * NAME: SNAPSHOT
14562     * </pre>
14563     */
14564    public static final class Builder extends
14565        com.google.protobuf.GeneratedMessage.Builder<Builder>
14566       implements org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSectionOrBuilder {
14567      public static final com.google.protobuf.Descriptors.Descriptor
14568          getDescriptor() {
14569        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SnapshotSection_descriptor;
14570      }
14571
14572      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
14573          internalGetFieldAccessorTable() {
14574        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SnapshotSection_fieldAccessorTable
14575            .ensureFieldAccessorsInitialized(
14576                org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.Builder.class);
14577      }
14578
14579      // Construct using org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.newBuilder()
14580      private Builder() {
14581        maybeForceBuilderInitialization();
14582      }
14583
14584      private Builder(
14585          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
14586        super(parent);
14587        maybeForceBuilderInitialization();
14588      }
14589      private void maybeForceBuilderInitialization() {
14590        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
14591        }
14592      }
14593      private static Builder create() {
14594        return new Builder();
14595      }
14596
14597      public Builder clear() {
14598        super.clear();
14599        snapshotCounter_ = 0;
14600        bitField0_ = (bitField0_ & ~0x00000001);
14601        snapshottableDir_ = java.util.Collections.emptyList();
14602        bitField0_ = (bitField0_ & ~0x00000002);
14603        numSnapshots_ = 0;
14604        bitField0_ = (bitField0_ & ~0x00000004);
14605        return this;
14606      }
14607
14608      public Builder clone() {
14609        return create().mergeFrom(buildPartial());
14610      }
14611
14612      public com.google.protobuf.Descriptors.Descriptor
14613          getDescriptorForType() {
14614        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SnapshotSection_descriptor;
14615      }
14616
14617      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection getDefaultInstanceForType() {
14618        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.getDefaultInstance();
14619      }
14620
14621      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection build() {
14622        org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection result = buildPartial();
14623        if (!result.isInitialized()) {
14624          throw newUninitializedMessageException(result);
14625        }
14626        return result;
14627      }
14628
14629      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection buildPartial() {
14630        org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection result = new org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection(this);
14631        int from_bitField0_ = bitField0_;
14632        int to_bitField0_ = 0;
14633        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
14634          to_bitField0_ |= 0x00000001;
14635        }
14636        result.snapshotCounter_ = snapshotCounter_;
14637        if (((bitField0_ & 0x00000002) == 0x00000002)) {
14638          snapshottableDir_ = java.util.Collections.unmodifiableList(snapshottableDir_);
14639          bitField0_ = (bitField0_ & ~0x00000002);
14640        }
14641        result.snapshottableDir_ = snapshottableDir_;
14642        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
14643          to_bitField0_ |= 0x00000002;
14644        }
14645        result.numSnapshots_ = numSnapshots_;
14646        result.bitField0_ = to_bitField0_;
14647        onBuilt();
14648        return result;
14649      }
14650
14651      public Builder mergeFrom(com.google.protobuf.Message other) {
14652        if (other instanceof org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection) {
14653          return mergeFrom((org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection)other);
14654        } else {
14655          super.mergeFrom(other);
14656          return this;
14657        }
14658      }
14659
14660      public Builder mergeFrom(org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection other) {
14661        if (other == org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection.getDefaultInstance()) return this;
14662        if (other.hasSnapshotCounter()) {
14663          setSnapshotCounter(other.getSnapshotCounter());
14664        }
14665        if (!other.snapshottableDir_.isEmpty()) {
14666          if (snapshottableDir_.isEmpty()) {
14667            snapshottableDir_ = other.snapshottableDir_;
14668            bitField0_ = (bitField0_ & ~0x00000002);
14669          } else {
14670            ensureSnapshottableDirIsMutable();
14671            snapshottableDir_.addAll(other.snapshottableDir_);
14672          }
14673          onChanged();
14674        }
14675        if (other.hasNumSnapshots()) {
14676          setNumSnapshots(other.getNumSnapshots());
14677        }
14678        this.mergeUnknownFields(other.getUnknownFields());
14679        return this;
14680      }
14681
14682      public final boolean isInitialized() {
14683        return true;
14684      }
14685
14686      public Builder mergeFrom(
14687          com.google.protobuf.CodedInputStream input,
14688          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
14689          throws java.io.IOException {
14690        org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection parsedMessage = null;
14691        try {
14692          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
14693        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
14694          parsedMessage = (org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection) e.getUnfinishedMessage();
14695          throw e;
14696        } finally {
14697          if (parsedMessage != null) {
14698            mergeFrom(parsedMessage);
14699          }
14700        }
14701        return this;
14702      }
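      // Note: when parsePartialFrom fails, the partially decoded message is
      // still merged into this builder in the finally block, so any fields
      // read before the error are preserved while the exception propagates.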
14703      private int bitField0_;
14704
14705      // optional uint32 snapshotCounter = 1;
14706      private int snapshotCounter_ ;
14707      /**
14708       * <code>optional uint32 snapshotCounter = 1;</code>
14709       */
14710      public boolean hasSnapshotCounter() {
14711        return ((bitField0_ & 0x00000001) == 0x00000001);
14712      }
14713      /**
14714       * <code>optional uint32 snapshotCounter = 1;</code>
14715       */
14716      public int getSnapshotCounter() {
14717        return snapshotCounter_;
14718      }
14719      /**
14720       * <code>optional uint32 snapshotCounter = 1;</code>
14721       */
14722      public Builder setSnapshotCounter(int value) {
14723        bitField0_ |= 0x00000001;
14724        snapshotCounter_ = value;
14725        onChanged();
14726        return this;
14727      }
14728      /**
14729       * <code>optional uint32 snapshotCounter = 1;</code>
14730       */
14731      public Builder clearSnapshotCounter() {
14732        bitField0_ = (bitField0_ & ~0x00000001);
14733        snapshotCounter_ = 0;
14734        onChanged();
14735        return this;
14736      }
14737
14738      // repeated uint64 snapshottableDir = 2 [packed = true];
14739      private java.util.List<java.lang.Long> snapshottableDir_ = java.util.Collections.emptyList();
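      // Copy-on-write guard: the builder shares the immutable list until the
      // first mutation, when a private mutable copy is substituted and the
      // ownership bit is set in bitField0_.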
14740      private void ensureSnapshottableDirIsMutable() {
14741        if (!((bitField0_ & 0x00000002) == 0x00000002)) {
14742          snapshottableDir_ = new java.util.ArrayList<java.lang.Long>(snapshottableDir_);
14743          bitField0_ |= 0x00000002;
14744        }
14745      }
14746      /**
14747       * <code>repeated uint64 snapshottableDir = 2 [packed = true];</code>
14748       */
14749      public java.util.List<java.lang.Long>
14750          getSnapshottableDirList() {
14751        return java.util.Collections.unmodifiableList(snapshottableDir_);
14752      }
14753      /**
14754       * <code>repeated uint64 snapshottableDir = 2 [packed = true];</code>
14755       */
14756      public int getSnapshottableDirCount() {
14757        return snapshottableDir_.size();
14758      }
14759      /**
14760       * <code>repeated uint64 snapshottableDir = 2 [packed = true];</code>
14761       */
14762      public long getSnapshottableDir(int index) {
14763        return snapshottableDir_.get(index);
14764      }
14765      /**
14766       * <code>repeated uint64 snapshottableDir = 2 [packed = true];</code>
14767       */
14768      public Builder setSnapshottableDir(
14769          int index, long value) {
14770        ensureSnapshottableDirIsMutable();
14771        snapshottableDir_.set(index, value);
14772        onChanged();
14773        return this;
14774      }
14775      /**
14776       * <code>repeated uint64 snapshottableDir = 2 [packed = true];</code>
14777       */
14778      public Builder addSnapshottableDir(long value) {
14779        ensureSnapshottableDirIsMutable();
14780        snapshottableDir_.add(value);
14781        onChanged();
14782        return this;
14783      }
14784      /**
14785       * <code>repeated uint64 snapshottableDir = 2 [packed = true];</code>
14786       */
14787      public Builder addAllSnapshottableDir(
14788          java.lang.Iterable<? extends java.lang.Long> values) {
14789        ensureSnapshottableDirIsMutable();
14790        super.addAll(values, snapshottableDir_);
14791        onChanged();
14792        return this;
14793      }
14794      /**
14795       * <code>repeated uint64 snapshottableDir = 2 [packed = true];</code>
14796       */
14797      public Builder clearSnapshottableDir() {
14798        snapshottableDir_ = java.util.Collections.emptyList();
14799        bitField0_ = (bitField0_ & ~0x00000002);
14800        onChanged();
14801        return this;
14802      }
14803
14804      // optional uint32 numSnapshots = 3;
14805      private int numSnapshots_ ;
14806      /**
14807       * <code>optional uint32 numSnapshots = 3;</code>
14808       *
14809       * <pre>
14810       * total number of snapshots
14811       * </pre>
14812       */
14813      public boolean hasNumSnapshots() {
14814        return ((bitField0_ & 0x00000004) == 0x00000004);
14815      }
14816      /**
14817       * <code>optional uint32 numSnapshots = 3;</code>
14818       *
14819       * <pre>
14820       * total number of snapshots
14821       * </pre>
14822       */
14823      public int getNumSnapshots() {
14824        return numSnapshots_;
14825      }
14826      /**
14827       * <code>optional uint32 numSnapshots = 3;</code>
14828       *
14829       * <pre>
14830       * total number of snapshots
14831       * </pre>
14832       */
14833      public Builder setNumSnapshots(int value) {
14834        bitField0_ |= 0x00000004;
14835        numSnapshots_ = value;
14836        onChanged();
14837        return this;
14838      }
14839      /**
14840       * <code>optional uint32 numSnapshots = 3;</code>
14841       *
14842       * <pre>
14843       * total number of snapshots
14844       * </pre>
14845       */
14846      public Builder clearNumSnapshots() {
14847        bitField0_ = (bitField0_ & ~0x00000004);
14848        numSnapshots_ = 0;
14849        onChanged();
14850        return this;
14851      }
14852
14853      // @@protoc_insertion_point(builder_scope:hadoop.hdfs.fsimage.SnapshotSection)
14854    }
14855
14856    static {
14857      defaultInstance = new SnapshotSection(true);
14858      defaultInstance.initFields();
14859    }
14860
14861    // @@protoc_insertion_point(class_scope:hadoop.hdfs.fsimage.SnapshotSection)
14862  }
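
  // Illustrative sketch (not part of the generated message API): a minimal
  // build/serialize/parse round trip for SnapshotSection, assuming the
  // standard generated parseFrom(byte[]) overload and a hypothetical inode id.
  //
  //   FsImageProto.SnapshotSection section =
  //       FsImageProto.SnapshotSection.newBuilder()
  //           .setSnapshotCounter(3)
  //           .addSnapshottableDir(16385L) // hypothetical snapshottable-dir inode id
  //           .setNumSnapshots(2)
  //           .build();
  //   byte[] bytes = section.toByteArray();
  //   FsImageProto.SnapshotSection parsed =
  //       FsImageProto.SnapshotSection.parseFrom(bytes);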
14863
14864  public interface SnapshotDiffSectionOrBuilder
14865      extends com.google.protobuf.MessageOrBuilder {
14866  }
14867  /**
14868   * Protobuf type {@code hadoop.hdfs.fsimage.SnapshotDiffSection}
14869   *
14870   * <pre>
14871   **
14872   * This section records information about snapshot diffs
14873   * NAME: SNAPSHOT_DIFF
14874   * </pre>
14875   */
14876  public static final class SnapshotDiffSection extends
14877      com.google.protobuf.GeneratedMessage
14878      implements SnapshotDiffSectionOrBuilder {
14879    // Use SnapshotDiffSection.newBuilder() to construct.
14880    private SnapshotDiffSection(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
14881      super(builder);
14882      this.unknownFields = builder.getUnknownFields();
14883    }
14884    private SnapshotDiffSection(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
14885
14886    private static final SnapshotDiffSection defaultInstance;
14887    public static SnapshotDiffSection getDefaultInstance() {
14888      return defaultInstance;
14889    }
14890
14891    public SnapshotDiffSection getDefaultInstanceForType() {
14892      return defaultInstance;
14893    }
14894
14895    private final com.google.protobuf.UnknownFieldSet unknownFields;
14896    @java.lang.Override
14897    public final com.google.protobuf.UnknownFieldSet
14898        getUnknownFields() {
14899      return this.unknownFields;
14900    }
14901    private SnapshotDiffSection(
14902        com.google.protobuf.CodedInputStream input,
14903        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
14904        throws com.google.protobuf.InvalidProtocolBufferException {
14905      initFields();
14906      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
14907          com.google.protobuf.UnknownFieldSet.newBuilder();
14908      try {
14909        boolean done = false;
14910        while (!done) {
14911          int tag = input.readTag();
14912          switch (tag) {
14913            case 0:
14914              done = true;
14915              break;
14916            default: {
14917              if (!parseUnknownField(input, unknownFields,
14918                                     extensionRegistry, tag)) {
14919                done = true;
14920              }
14921              break;
14922            }
14923          }
14924        }
14925      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
14926        throw e.setUnfinishedMessage(this);
14927      } catch (java.io.IOException e) {
14928        throw new com.google.protobuf.InvalidProtocolBufferException(
14929            e.getMessage()).setUnfinishedMessage(this);
14930      } finally {
14931        this.unknownFields = unknownFields.build();
14932        makeExtensionsImmutable();
14933      }
14934    }
14935    public static final com.google.protobuf.Descriptors.Descriptor
14936        getDescriptor() {
14937      return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_descriptor;
14938    }
14939
14940    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
14941        internalGetFieldAccessorTable() {
14942      return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_fieldAccessorTable
14943          .ensureFieldAccessorsInitialized(
14944              org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.Builder.class);
14945    }
14946
14947    public static com.google.protobuf.Parser<SnapshotDiffSection> PARSER =
14948        new com.google.protobuf.AbstractParser<SnapshotDiffSection>() {
14949      public SnapshotDiffSection parsePartialFrom(
14950          com.google.protobuf.CodedInputStream input,
14951          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
14952          throws com.google.protobuf.InvalidProtocolBufferException {
14953        return new SnapshotDiffSection(input, extensionRegistry);
14954      }
14955    };
14956
14957    @java.lang.Override
14958    public com.google.protobuf.Parser<SnapshotDiffSection> getParserForType() {
14959      return PARSER;
14960    }
14961
14962    public interface CreatedListEntryOrBuilder
14963        extends com.google.protobuf.MessageOrBuilder {
14964
14965      // optional bytes name = 1;
14966      /**
14967       * <code>optional bytes name = 1;</code>
14968       */
14969      boolean hasName();
14970      /**
14971       * <code>optional bytes name = 1;</code>
14972       */
14973      com.google.protobuf.ByteString getName();
14974    }
14975    /**
14976     * Protobuf type {@code hadoop.hdfs.fsimage.SnapshotDiffSection.CreatedListEntry}
14977     */
14978    public static final class CreatedListEntry extends
14979        com.google.protobuf.GeneratedMessage
14980        implements CreatedListEntryOrBuilder {
14981      // Use CreatedListEntry.newBuilder() to construct.
14982      private CreatedListEntry(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
14983        super(builder);
14984        this.unknownFields = builder.getUnknownFields();
14985      }
14986      private CreatedListEntry(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
14987
14988      private static final CreatedListEntry defaultInstance;
14989      public static CreatedListEntry getDefaultInstance() {
14990        return defaultInstance;
14991      }
14992
14993      public CreatedListEntry getDefaultInstanceForType() {
14994        return defaultInstance;
14995      }
14996
14997      private final com.google.protobuf.UnknownFieldSet unknownFields;
14998      @java.lang.Override
14999      public final com.google.protobuf.UnknownFieldSet
15000          getUnknownFields() {
15001        return this.unknownFields;
15002      }
15003      private CreatedListEntry(
15004          com.google.protobuf.CodedInputStream input,
15005          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
15006          throws com.google.protobuf.InvalidProtocolBufferException {
15007        initFields();
15008        int mutable_bitField0_ = 0;
15009        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
15010            com.google.protobuf.UnknownFieldSet.newBuilder();
15011        try {
15012          boolean done = false;
15013          while (!done) {
15014            int tag = input.readTag();
15015            switch (tag) {
15016              case 0:
15017                done = true;
15018                break;
15019              default: {
15020                if (!parseUnknownField(input, unknownFields,
15021                                       extensionRegistry, tag)) {
15022                  done = true;
15023                }
15024                break;
15025              }
15026              case 10: {
15027                bitField0_ |= 0x00000001;
15028                name_ = input.readBytes();
15029                break;
15030              }
15031            }
15032          }
15033        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
15034          throw e.setUnfinishedMessage(this);
15035        } catch (java.io.IOException e) {
15036          throw new com.google.protobuf.InvalidProtocolBufferException(
15037              e.getMessage()).setUnfinishedMessage(this);
15038        } finally {
15039          this.unknownFields = unknownFields.build();
15040          makeExtensionsImmutable();
15041        }
15042      }
15043      public static final com.google.protobuf.Descriptors.Descriptor
15044          getDescriptor() {
15045        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_CreatedListEntry_descriptor;
15046      }
15047
15048      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
15049          internalGetFieldAccessorTable() {
15050        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_CreatedListEntry_fieldAccessorTable
15051            .ensureFieldAccessorsInitialized(
15052                org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.CreatedListEntry.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.CreatedListEntry.Builder.class);
15053      }
15054
15055      public static com.google.protobuf.Parser<CreatedListEntry> PARSER =
15056          new com.google.protobuf.AbstractParser<CreatedListEntry>() {
15057        public CreatedListEntry parsePartialFrom(
15058            com.google.protobuf.CodedInputStream input,
15059            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
15060            throws com.google.protobuf.InvalidProtocolBufferException {
15061          return new CreatedListEntry(input, extensionRegistry);
15062        }
15063      };
15064
15065      @java.lang.Override
15066      public com.google.protobuf.Parser<CreatedListEntry> getParserForType() {
15067        return PARSER;
15068      }
15069
15070      private int bitField0_;
15071      // optional bytes name = 1;
15072      public static final int NAME_FIELD_NUMBER = 1;
15073      private com.google.protobuf.ByteString name_;
15074      /**
15075       * <code>optional bytes name = 1;</code>
15076       */
15077      public boolean hasName() {
15078        return ((bitField0_ & 0x00000001) == 0x00000001);
15079      }
15080      /**
15081       * <code>optional bytes name = 1;</code>
15082       */
15083      public com.google.protobuf.ByteString getName() {
15084        return name_;
15085      }
15086
15087      private void initFields() {
15088        name_ = com.google.protobuf.ByteString.EMPTY;
15089      }
15090      private byte memoizedIsInitialized = -1;
15091      public final boolean isInitialized() {
15092        byte isInitialized = memoizedIsInitialized;
15093        if (isInitialized != -1) return isInitialized == 1;
15094
15095        memoizedIsInitialized = 1;
15096        return true;
15097      }
15098
15099      public void writeTo(com.google.protobuf.CodedOutputStream output)
15100                          throws java.io.IOException {
15101        getSerializedSize();
15102        if (((bitField0_ & 0x00000001) == 0x00000001)) {
15103          output.writeBytes(1, name_);
15104        }
15105        getUnknownFields().writeTo(output);
15106      }
15107
15108      private int memoizedSerializedSize = -1;
15109      public int getSerializedSize() {
15110        int size = memoizedSerializedSize;
15111        if (size != -1) return size;
15112
15113        size = 0;
15114        if (((bitField0_ & 0x00000001) == 0x00000001)) {
15115          size += com.google.protobuf.CodedOutputStream
15116            .computeBytesSize(1, name_);
15117        }
15118        size += getUnknownFields().getSerializedSize();
15119        memoizedSerializedSize = size;
15120        return size;
15121      }
15122
15123      private static final long serialVersionUID = 0L;
15124      @java.lang.Override
15125      protected java.lang.Object writeReplace()
15126          throws java.io.ObjectStreamException {
15127        return super.writeReplace();
15128      }
15129
15130      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.CreatedListEntry parseFrom(
15131          com.google.protobuf.ByteString data)
15132          throws com.google.protobuf.InvalidProtocolBufferException {
15133        return PARSER.parseFrom(data);
15134      }
15135      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.CreatedListEntry parseFrom(
15136          com.google.protobuf.ByteString data,
15137          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
15138          throws com.google.protobuf.InvalidProtocolBufferException {
15139        return PARSER.parseFrom(data, extensionRegistry);
15140      }
15141      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.CreatedListEntry parseFrom(byte[] data)
15142          throws com.google.protobuf.InvalidProtocolBufferException {
15143        return PARSER.parseFrom(data);
15144      }
15145      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.CreatedListEntry parseFrom(
15146          byte[] data,
15147          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
15148          throws com.google.protobuf.InvalidProtocolBufferException {
15149        return PARSER.parseFrom(data, extensionRegistry);
15150      }
15151      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.CreatedListEntry parseFrom(java.io.InputStream input)
15152          throws java.io.IOException {
15153        return PARSER.parseFrom(input);
15154      }
15155      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.CreatedListEntry parseFrom(
15156          java.io.InputStream input,
15157          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
15158          throws java.io.IOException {
15159        return PARSER.parseFrom(input, extensionRegistry);
15160      }
15161      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.CreatedListEntry parseDelimitedFrom(java.io.InputStream input)
15162          throws java.io.IOException {
15163        return PARSER.parseDelimitedFrom(input);
15164      }
15165      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.CreatedListEntry parseDelimitedFrom(
15166          java.io.InputStream input,
15167          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
15168          throws java.io.IOException {
15169        return PARSER.parseDelimitedFrom(input, extensionRegistry);
15170      }
15171      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.CreatedListEntry parseFrom(
15172          com.google.protobuf.CodedInputStream input)
15173          throws java.io.IOException {
15174        return PARSER.parseFrom(input);
15175      }
15176      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.CreatedListEntry parseFrom(
15177          com.google.protobuf.CodedInputStream input,
15178          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
15179          throws java.io.IOException {
15180        return PARSER.parseFrom(input, extensionRegistry);
15181      }
15182
15183      public static Builder newBuilder() { return Builder.create(); }
15184      public Builder newBuilderForType() { return newBuilder(); }
15185      public static Builder newBuilder(org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.CreatedListEntry prototype) {
15186        return newBuilder().mergeFrom(prototype);
15187      }
15188      public Builder toBuilder() { return newBuilder(this); }
15189
15190      @java.lang.Override
15191      protected Builder newBuilderForType(
15192          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
15193        Builder builder = new Builder(parent);
15194        return builder;
15195      }
15196      /**
15197       * Protobuf type {@code hadoop.hdfs.fsimage.SnapshotDiffSection.CreatedListEntry}
15198       */
15199      public static final class Builder extends
15200          com.google.protobuf.GeneratedMessage.Builder<Builder>
15201         implements org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.CreatedListEntryOrBuilder {
15202        public static final com.google.protobuf.Descriptors.Descriptor
15203            getDescriptor() {
15204          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_CreatedListEntry_descriptor;
15205        }
15206
15207        protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
15208            internalGetFieldAccessorTable() {
15209          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_CreatedListEntry_fieldAccessorTable
15210              .ensureFieldAccessorsInitialized(
15211                  org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.CreatedListEntry.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.CreatedListEntry.Builder.class);
15212        }
15213
15214        // Construct using org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.CreatedListEntry.newBuilder()
15215        private Builder() {
15216          maybeForceBuilderInitialization();
15217        }
15218
15219        private Builder(
15220            com.google.protobuf.GeneratedMessage.BuilderParent parent) {
15221          super(parent);
15222          maybeForceBuilderInitialization();
15223        }
15224        private void maybeForceBuilderInitialization() {
15225          if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
15226          }
15227        }
15228        private static Builder create() {
15229          return new Builder();
15230        }
15231
15232        public Builder clear() {
15233          super.clear();
15234          name_ = com.google.protobuf.ByteString.EMPTY;
15235          bitField0_ = (bitField0_ & ~0x00000001);
15236          return this;
15237        }
15238
15239        public Builder clone() {
15240          return create().mergeFrom(buildPartial());
15241        }
15242
15243        public com.google.protobuf.Descriptors.Descriptor
15244            getDescriptorForType() {
15245          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_CreatedListEntry_descriptor;
15246        }
15247
15248        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.CreatedListEntry getDefaultInstanceForType() {
15249          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.CreatedListEntry.getDefaultInstance();
15250        }
15251
15252        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.CreatedListEntry build() {
15253          org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.CreatedListEntry result = buildPartial();
15254          if (!result.isInitialized()) {
15255            throw newUninitializedMessageException(result);
15256          }
15257          return result;
15258        }
15259
15260        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.CreatedListEntry buildPartial() {
15261          org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.CreatedListEntry result = new org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.CreatedListEntry(this);
15262          int from_bitField0_ = bitField0_;
15263          int to_bitField0_ = 0;
15264          if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
15265            to_bitField0_ |= 0x00000001;
15266          }
15267          result.name_ = name_;
15268          result.bitField0_ = to_bitField0_;
15269          onBuilt();
15270          return result;
15271        }
15272
15273        public Builder mergeFrom(com.google.protobuf.Message other) {
15274          if (other instanceof org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.CreatedListEntry) {
15275            return mergeFrom((org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.CreatedListEntry)other);
15276          } else {
15277            super.mergeFrom(other);
15278            return this;
15279          }
15280        }
15281
15282        public Builder mergeFrom(org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.CreatedListEntry other) {
15283          if (other == org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.CreatedListEntry.getDefaultInstance()) return this;
15284          if (other.hasName()) {
15285            setName(other.getName());
15286          }
15287          this.mergeUnknownFields(other.getUnknownFields());
15288          return this;
15289        }
15290
15291        public final boolean isInitialized() {
15292          return true;
15293        }
15294
15295        public Builder mergeFrom(
15296            com.google.protobuf.CodedInputStream input,
15297            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
15298            throws java.io.IOException {
15299          org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.CreatedListEntry parsedMessage = null;
15300          try {
15301            parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
15302          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
15303            parsedMessage = (org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.CreatedListEntry) e.getUnfinishedMessage();
15304            throw e;
15305          } finally {
15306            if (parsedMessage != null) {
15307              mergeFrom(parsedMessage);
15308            }
15309          }
15310          return this;
15311        }
15312        private int bitField0_;
15313
15314        // optional bytes name = 1;
15315        private com.google.protobuf.ByteString name_ = com.google.protobuf.ByteString.EMPTY;
15316        /**
15317         * <code>optional bytes name = 1;</code>
15318         */
15319        public boolean hasName() {
15320          return ((bitField0_ & 0x00000001) == 0x00000001);
15321        }
15322        /**
15323         * <code>optional bytes name = 1;</code>
15324         */
15325        public com.google.protobuf.ByteString getName() {
15326          return name_;
15327        }
15328        /**
15329         * <code>optional bytes name = 1;</code>
15330         */
15331        public Builder setName(com.google.protobuf.ByteString value) {
15332          if (value == null) {
15333            throw new NullPointerException();
15334          }
15335          bitField0_ |= 0x00000001;
15336          name_ = value;
15337          onChanged();
15338          return this;
15339        }
15340        /**
15341         * <code>optional bytes name = 1;</code>
15342         */
15343        public Builder clearName() {
15344          bitField0_ = (bitField0_ & ~0x00000001);
15345          name_ = getDefaultInstance().getName();
15346          onChanged();
15347          return this;
15348        }
15349
15350        // @@protoc_insertion_point(builder_scope:hadoop.hdfs.fsimage.SnapshotDiffSection.CreatedListEntry)
15351      }
15352
15353      static {
15354        defaultInstance = new CreatedListEntry(true);
15355        defaultInstance.initFields();
15356      }
15357
15358      // @@protoc_insertion_point(class_scope:hadoop.hdfs.fsimage.SnapshotDiffSection.CreatedListEntry)
15359    }
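
    // Illustrative sketch (not generated output): reading length-delimited
    // CreatedListEntry records from a hypothetical InputStream "in";
    // parseDelimitedFrom returns null at end of stream.
    //
    //   FsImageProto.SnapshotDiffSection.CreatedListEntry entry =
    //       FsImageProto.SnapshotDiffSection.CreatedListEntry.parseDelimitedFrom(in);
    //   if (entry != null && entry.hasName()) {
    //     String name = entry.getName().toStringUtf8();
    //   }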
15360
15361    public interface DirectoryDiffOrBuilder
15362        extends com.google.protobuf.MessageOrBuilder {
15363
15364      // optional uint32 snapshotId = 1;
15365      /**
15366       * <code>optional uint32 snapshotId = 1;</code>
15367       */
15368      boolean hasSnapshotId();
15369      /**
15370       * <code>optional uint32 snapshotId = 1;</code>
15371       */
15372      int getSnapshotId();
15373
15374      // optional uint32 childrenSize = 2;
15375      /**
15376       * <code>optional uint32 childrenSize = 2;</code>
15377       */
15378      boolean hasChildrenSize();
15379      /**
15380       * <code>optional uint32 childrenSize = 2;</code>
15381       */
15382      int getChildrenSize();
15383
15384      // optional bool isSnapshotRoot = 3;
15385      /**
15386       * <code>optional bool isSnapshotRoot = 3;</code>
15387       */
15388      boolean hasIsSnapshotRoot();
15389      /**
15390       * <code>optional bool isSnapshotRoot = 3;</code>
15391       */
15392      boolean getIsSnapshotRoot();
15393
15394      // optional bytes name = 4;
15395      /**
15396       * <code>optional bytes name = 4;</code>
15397       */
15398      boolean hasName();
15399      /**
15400       * <code>optional bytes name = 4;</code>
15401       */
15402      com.google.protobuf.ByteString getName();
15403
15404      // optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory snapshotCopy = 5;
15405      /**
15406       * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory snapshotCopy = 5;</code>
15407       */
15408      boolean hasSnapshotCopy();
15409      /**
15410       * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory snapshotCopy = 5;</code>
15411       */
15412      org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory getSnapshotCopy();
15413      /**
15414       * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory snapshotCopy = 5;</code>
15415       */
15416      org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectoryOrBuilder getSnapshotCopyOrBuilder();
15417
15418      // optional uint32 createdListSize = 6;
15419      /**
15420       * <code>optional uint32 createdListSize = 6;</code>
15421       */
15422      boolean hasCreatedListSize();
15423      /**
15424       * <code>optional uint32 createdListSize = 6;</code>
15425       */
15426      int getCreatedListSize();
15427
15428      // repeated uint64 deletedINode = 7 [packed = true];
15429      /**
15430       * <code>repeated uint64 deletedINode = 7 [packed = true];</code>
15431       *
15432       * <pre>
15433       * id of deleted inodes
15434       * </pre>
15435       */
15436      java.util.List<java.lang.Long> getDeletedINodeList();
15437      /**
15438       * <code>repeated uint64 deletedINode = 7 [packed = true];</code>
15439       *
15440       * <pre>
15441       * id of deleted inodes
15442       * </pre>
15443       */
15444      int getDeletedINodeCount();
15445      /**
15446       * <code>repeated uint64 deletedINode = 7 [packed = true];</code>
15447       *
15448       * <pre>
15449       * id of deleted inodes
15450       * </pre>
15451       */
15452      long getDeletedINode(int index);
15453
15454      // repeated uint32 deletedINodeRef = 8 [packed = true];
15455      /**
15456       * <code>repeated uint32 deletedINodeRef = 8 [packed = true];</code>
15457       *
15458       * <pre>
15459       * id of reference nodes in the deleted list
15460       * </pre>
15461       */
15462      java.util.List<java.lang.Integer> getDeletedINodeRefList();
15463      /**
15464       * <code>repeated uint32 deletedINodeRef = 8 [packed = true];</code>
15465       *
15466       * <pre>
15467       * id of reference nodes in the deleted list
15468       * </pre>
15469       */
15470      int getDeletedINodeRefCount();
15471      /**
15472       * <code>repeated uint32 deletedINodeRef = 8 [packed = true];</code>
15473       *
15474       * <pre>
15475       * id of reference nodes in the deleted list
15476       * </pre>
15477       */
15478      int getDeletedINodeRef(int index);
15479    }
15480    /**
15481     * Protobuf type {@code hadoop.hdfs.fsimage.SnapshotDiffSection.DirectoryDiff}
15482     */
15483    public static final class DirectoryDiff extends
15484        com.google.protobuf.GeneratedMessage
15485        implements DirectoryDiffOrBuilder {
15486      // Use DirectoryDiff.newBuilder() to construct.
15487      private DirectoryDiff(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
15488        super(builder);
15489        this.unknownFields = builder.getUnknownFields();
15490      }
15491      private DirectoryDiff(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
15492
15493      private static final DirectoryDiff defaultInstance;
15494      public static DirectoryDiff getDefaultInstance() {
15495        return defaultInstance;
15496      }
15497
15498      public DirectoryDiff getDefaultInstanceForType() {
15499        return defaultInstance;
15500      }
15501
15502      private final com.google.protobuf.UnknownFieldSet unknownFields;
15503      @java.lang.Override
15504      public final com.google.protobuf.UnknownFieldSet
15505          getUnknownFields() {
15506        return this.unknownFields;
15507      }
15508      private DirectoryDiff(
15509          com.google.protobuf.CodedInputStream input,
15510          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
15511          throws com.google.protobuf.InvalidProtocolBufferException {
15512        initFields();
15513        int mutable_bitField0_ = 0;
15514        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
15515            com.google.protobuf.UnknownFieldSet.newBuilder();
15516        try {
15517          boolean done = false;
15518          while (!done) {
15519            int tag = input.readTag();
15520            switch (tag) {
15521              case 0:
15522                done = true;
15523                break;
15524              default: {
15525                if (!parseUnknownField(input, unknownFields,
15526                                       extensionRegistry, tag)) {
15527                  done = true;
15528                }
15529                break;
15530              }
15531              case 8: {
15532                bitField0_ |= 0x00000001;
15533                snapshotId_ = input.readUInt32();
15534                break;
15535              }
15536              case 16: {
15537                bitField0_ |= 0x00000002;
15538                childrenSize_ = input.readUInt32();
15539                break;
15540              }
15541              case 24: {
15542                bitField0_ |= 0x00000004;
15543                isSnapshotRoot_ = input.readBool();
15544                break;
15545              }
15546              case 34: {
15547                bitField0_ |= 0x00000008;
15548                name_ = input.readBytes();
15549                break;
15550              }
15551              case 42: {
15552                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory.Builder subBuilder = null;
15553                if (((bitField0_ & 0x00000010) == 0x00000010)) {
15554                  subBuilder = snapshotCopy_.toBuilder();
15555                }
15556                snapshotCopy_ = input.readMessage(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory.PARSER, extensionRegistry);
15557                if (subBuilder != null) {
15558                  subBuilder.mergeFrom(snapshotCopy_);
15559                  snapshotCopy_ = subBuilder.buildPartial();
15560                }
15561                bitField0_ |= 0x00000010;
15562                break;
15563              }
15564              case 48: {
15565                bitField0_ |= 0x00000020;
15566                createdListSize_ = input.readUInt32();
15567                break;
15568              }
15569              case 56: {
15570                if (!((mutable_bitField0_ & 0x00000040) == 0x00000040)) {
15571                  deletedINode_ = new java.util.ArrayList<java.lang.Long>();
15572                  mutable_bitField0_ |= 0x00000040;
15573                }
15574                deletedINode_.add(input.readUInt64());
15575                break;
15576              }
15577              case 58: {
15578                int length = input.readRawVarint32();
15579                int limit = input.pushLimit(length);
15580                if (!((mutable_bitField0_ & 0x00000040) == 0x00000040) && input.getBytesUntilLimit() > 0) {
15581                  deletedINode_ = new java.util.ArrayList<java.lang.Long>();
15582                  mutable_bitField0_ |= 0x00000040;
15583                }
15584                while (input.getBytesUntilLimit() > 0) {
15585                  deletedINode_.add(input.readUInt64());
15586                }
15587                input.popLimit(limit);
15588                break;
15589              }
15590              case 64: {
15591                if (!((mutable_bitField0_ & 0x00000080) == 0x00000080)) {
15592                  deletedINodeRef_ = new java.util.ArrayList<java.lang.Integer>();
15593                  mutable_bitField0_ |= 0x00000080;
15594                }
15595                deletedINodeRef_.add(input.readUInt32());
15596                break;
15597              }
15598              case 66: {
15599                int length = input.readRawVarint32();
15600                int limit = input.pushLimit(length);
15601                if (!((mutable_bitField0_ & 0x00000080) == 0x00000080) && input.getBytesUntilLimit() > 0) {
15602                  deletedINodeRef_ = new java.util.ArrayList<java.lang.Integer>();
15603                  mutable_bitField0_ |= 0x00000080;
15604                }
15605                while (input.getBytesUntilLimit() > 0) {
15606                  deletedINodeRef_.add(input.readUInt32());
15607                }
15608                input.popLimit(limit);
15609                break;
15610              }
15611            }
15612          }
15613        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
15614          throw e.setUnfinishedMessage(this);
15615        } catch (java.io.IOException e) {
15616          throw new com.google.protobuf.InvalidProtocolBufferException(
15617              e.getMessage()).setUnfinishedMessage(this);
15618        } finally {
15619          if (((mutable_bitField0_ & 0x00000040) == 0x00000040)) {
15620            deletedINode_ = java.util.Collections.unmodifiableList(deletedINode_);
15621          }
15622          if (((mutable_bitField0_ & 0x00000080) == 0x00000080)) {
15623            deletedINodeRef_ = java.util.Collections.unmodifiableList(deletedINodeRef_);
15624          }
15625          this.unknownFields = unknownFields.build();
15626          makeExtensionsImmutable();
15627        }
15628      }
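      // Note: the repeated fields above are parsed from either wire form,
      // unpacked (cases 56 and 64, one varint per tag) or packed (cases 58
      // and 66, a single length-delimited run of varints), since protobuf
      // parsers must accept both regardless of the [packed = true] option.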
15629      public static final com.google.protobuf.Descriptors.Descriptor
15630          getDescriptor() {
15631        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_DirectoryDiff_descriptor;
15632      }
15633
15634      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
15635          internalGetFieldAccessorTable() {
15636        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_DirectoryDiff_fieldAccessorTable
15637            .ensureFieldAccessorsInitialized(
15638                org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DirectoryDiff.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DirectoryDiff.Builder.class);
15639      }
15640
15641      public static com.google.protobuf.Parser<DirectoryDiff> PARSER =
15642          new com.google.protobuf.AbstractParser<DirectoryDiff>() {
15643        public DirectoryDiff parsePartialFrom(
15644            com.google.protobuf.CodedInputStream input,
15645            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
15646            throws com.google.protobuf.InvalidProtocolBufferException {
15647          return new DirectoryDiff(input, extensionRegistry);
15648        }
15649      };
15650
15651      @java.lang.Override
15652      public com.google.protobuf.Parser<DirectoryDiff> getParserForType() {
15653        return PARSER;
15654      }
15655
15656      private int bitField0_;
15657      // optional uint32 snapshotId = 1;
15658      public static final int SNAPSHOTID_FIELD_NUMBER = 1;
15659      private int snapshotId_;
15660      /**
15661       * <code>optional uint32 snapshotId = 1;</code>
15662       */
15663      public boolean hasSnapshotId() {
15664        return ((bitField0_ & 0x00000001) == 0x00000001);
15665      }
15666      /**
15667       * <code>optional uint32 snapshotId = 1;</code>
15668       */
15669      public int getSnapshotId() {
15670        return snapshotId_;
15671      }
15672
15673      // optional uint32 childrenSize = 2;
15674      public static final int CHILDRENSIZE_FIELD_NUMBER = 2;
15675      private int childrenSize_;
15676      /**
15677       * <code>optional uint32 childrenSize = 2;</code>
15678       */
15679      public boolean hasChildrenSize() {
15680        return ((bitField0_ & 0x00000002) == 0x00000002);
15681      }
15682      /**
15683       * <code>optional uint32 childrenSize = 2;</code>
15684       */
15685      public int getChildrenSize() {
15686        return childrenSize_;
15687      }
15688
15689      // optional bool isSnapshotRoot = 3;
15690      public static final int ISSNAPSHOTROOT_FIELD_NUMBER = 3;
15691      private boolean isSnapshotRoot_;
15692      /**
15693       * <code>optional bool isSnapshotRoot = 3;</code>
15694       */
15695      public boolean hasIsSnapshotRoot() {
15696        return ((bitField0_ & 0x00000004) == 0x00000004);
15697      }
15698      /**
15699       * <code>optional bool isSnapshotRoot = 3;</code>
15700       */
15701      public boolean getIsSnapshotRoot() {
15702        return isSnapshotRoot_;
15703      }
15704
15705      // optional bytes name = 4;
15706      public static final int NAME_FIELD_NUMBER = 4;
15707      private com.google.protobuf.ByteString name_;
15708      /**
15709       * <code>optional bytes name = 4;</code>
15710       */
15711      public boolean hasName() {
15712        return ((bitField0_ & 0x00000008) == 0x00000008);
15713      }
15714      /**
15715       * <code>optional bytes name = 4;</code>
15716       */
15717      public com.google.protobuf.ByteString getName() {
15718        return name_;
15719      }
15720
15721      // optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory snapshotCopy = 5;
15722      public static final int SNAPSHOTCOPY_FIELD_NUMBER = 5;
15723      private org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory snapshotCopy_;
15724      /**
15725       * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory snapshotCopy = 5;</code>
15726       */
15727      public boolean hasSnapshotCopy() {
15728        return ((bitField0_ & 0x00000010) == 0x00000010);
15729      }
15730      /**
15731       * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory snapshotCopy = 5;</code>
15732       */
15733      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory getSnapshotCopy() {
15734        return snapshotCopy_;
15735      }
15736      /**
15737       * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory snapshotCopy = 5;</code>
15738       */
15739      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectoryOrBuilder getSnapshotCopyOrBuilder() {
15740        return snapshotCopy_;
15741      }
15742
15743      // optional uint32 createdListSize = 6;
15744      public static final int CREATEDLISTSIZE_FIELD_NUMBER = 6;
15745      private int createdListSize_;
15746      /**
15747       * <code>optional uint32 createdListSize = 6;</code>
15748       */
15749      public boolean hasCreatedListSize() {
15750        return ((bitField0_ & 0x00000020) == 0x00000020);
15751      }
15752      /**
15753       * <code>optional uint32 createdListSize = 6;</code>
15754       */
15755      public int getCreatedListSize() {
15756        return createdListSize_;
15757      }
15758
15759      // repeated uint64 deletedINode = 7 [packed = true];
15760      public static final int DELETEDINODE_FIELD_NUMBER = 7;
15761      private java.util.List<java.lang.Long> deletedINode_;
15762      /**
15763       * <code>repeated uint64 deletedINode = 7 [packed = true];</code>
15764       *
15765       * <pre>
15766       * id of deleted inodes
15767       * </pre>
15768       */
15769      public java.util.List<java.lang.Long>
15770          getDeletedINodeList() {
15771        return deletedINode_;
15772      }
15773      /**
15774       * <code>repeated uint64 deletedINode = 7 [packed = true];</code>
15775       *
15776       * <pre>
15777       * id of deleted inodes
15778       * </pre>
15779       */
15780      public int getDeletedINodeCount() {
15781        return deletedINode_.size();
15782      }
15783      /**
15784       * <code>repeated uint64 deletedINode = 7 [packed = true];</code>
15785       *
15786       * <pre>
15787       * id of deleted inodes
15788       * </pre>
15789       */
15790      public long getDeletedINode(int index) {
15791        return deletedINode_.get(index);
15792      }
15793      private int deletedINodeMemoizedSerializedSize = -1;
15794
15795      // repeated uint32 deletedINodeRef = 8 [packed = true];
15796      public static final int DELETEDINODEREF_FIELD_NUMBER = 8;
15797      private java.util.List<java.lang.Integer> deletedINodeRef_;
15798      /**
15799       * <code>repeated uint32 deletedINodeRef = 8 [packed = true];</code>
15800       *
15801       * <pre>
15802       * id of reference nodes in the deleted list
15803       * </pre>
15804       */
15805      public java.util.List<java.lang.Integer>
15806          getDeletedINodeRefList() {
15807        return deletedINodeRef_;
15808      }
15809      /**
15810       * <code>repeated uint32 deletedINodeRef = 8 [packed = true];</code>
15811       *
15812       * <pre>
15813       * id of reference nodes in the deleted list
15814       * </pre>
15815       */
15816      public int getDeletedINodeRefCount() {
15817        return deletedINodeRef_.size();
15818      }
15819      /**
15820       * <code>repeated uint32 deletedINodeRef = 8 [packed = true];</code>
15821       *
15822       * <pre>
15823       * id of reference nodes in the deleted list
15824       * </pre>
15825       */
15826      public int getDeletedINodeRef(int index) {
15827        return deletedINodeRef_.get(index);
15828      }
15829      private int deletedINodeRefMemoizedSerializedSize = -1;
15830
15831      private void initFields() {
15832        snapshotId_ = 0;
15833        childrenSize_ = 0;
15834        isSnapshotRoot_ = false;
15835        name_ = com.google.protobuf.ByteString.EMPTY;
15836        snapshotCopy_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory.getDefaultInstance();
15837        createdListSize_ = 0;
15838        deletedINode_ = java.util.Collections.emptyList();
15839        deletedINodeRef_ = java.util.Collections.emptyList();
15840      }
15841      private byte memoizedIsInitialized = -1;
15842      public final boolean isInitialized() {
15843        byte isInitialized = memoizedIsInitialized;
15844        if (isInitialized != -1) return isInitialized == 1;
15845
15846        if (hasSnapshotCopy()) {
15847          if (!getSnapshotCopy().isInitialized()) {
15848            memoizedIsInitialized = 0;
15849            return false;
15850          }
15851        }
15852        memoizedIsInitialized = 1;
15853        return true;
15854      }
15855
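      // writeTo() calls getSerializedSize() first so that the memoized packed
      // sizes (deletedINodeMemoizedSerializedSize and
      // deletedINodeRefMemoizedSerializedSize) are computed before being
      // written as length prefixes for the packed fields below.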
15856      public void writeTo(com.google.protobuf.CodedOutputStream output)
15857                          throws java.io.IOException {
15858        getSerializedSize();
15859        if (((bitField0_ & 0x00000001) == 0x00000001)) {
15860          output.writeUInt32(1, snapshotId_);
15861        }
15862        if (((bitField0_ & 0x00000002) == 0x00000002)) {
15863          output.writeUInt32(2, childrenSize_);
15864        }
15865        if (((bitField0_ & 0x00000004) == 0x00000004)) {
15866          output.writeBool(3, isSnapshotRoot_);
15867        }
15868        if (((bitField0_ & 0x00000008) == 0x00000008)) {
15869          output.writeBytes(4, name_);
15870        }
15871        if (((bitField0_ & 0x00000010) == 0x00000010)) {
15872          output.writeMessage(5, snapshotCopy_);
15873        }
15874        if (((bitField0_ & 0x00000020) == 0x00000020)) {
15875          output.writeUInt32(6, createdListSize_);
15876        }
15877        if (getDeletedINodeList().size() > 0) {
15878          output.writeRawVarint32(58);
15879          output.writeRawVarint32(deletedINodeMemoizedSerializedSize);
15880        }
15881        for (int i = 0; i < deletedINode_.size(); i++) {
15882          output.writeUInt64NoTag(deletedINode_.get(i));
15883        }
15884        if (getDeletedINodeRefList().size() > 0) {
15885          output.writeRawVarint32(66);
15886          output.writeRawVarint32(deletedINodeRefMemoizedSerializedSize);
15887        }
15888        for (int i = 0; i < deletedINodeRef_.size(); i++) {
15889          output.writeUInt32NoTag(deletedINodeRef_.get(i));
15890        }
15891        getUnknownFields().writeTo(output);
15892      }
15893
15894      private int memoizedSerializedSize = -1;
15895      public int getSerializedSize() {
15896        int size = memoizedSerializedSize;
15897        if (size != -1) return size;
15898
15899        size = 0;
15900        if (((bitField0_ & 0x00000001) == 0x00000001)) {
15901          size += com.google.protobuf.CodedOutputStream
15902            .computeUInt32Size(1, snapshotId_);
15903        }
15904        if (((bitField0_ & 0x00000002) == 0x00000002)) {
15905          size += com.google.protobuf.CodedOutputStream
15906            .computeUInt32Size(2, childrenSize_);
15907        }
15908        if (((bitField0_ & 0x00000004) == 0x00000004)) {
15909          size += com.google.protobuf.CodedOutputStream
15910            .computeBoolSize(3, isSnapshotRoot_);
15911        }
15912        if (((bitField0_ & 0x00000008) == 0x00000008)) {
15913          size += com.google.protobuf.CodedOutputStream
15914            .computeBytesSize(4, name_);
15915        }
15916        if (((bitField0_ & 0x00000010) == 0x00000010)) {
15917          size += com.google.protobuf.CodedOutputStream
15918            .computeMessageSize(5, snapshotCopy_);
15919        }
15920        if (((bitField0_ & 0x00000020) == 0x00000020)) {
15921          size += com.google.protobuf.CodedOutputStream
15922            .computeUInt32Size(6, createdListSize_);
15923        }
15924        {
15925          int dataSize = 0;
15926          for (int i = 0; i < deletedINode_.size(); i++) {
15927            dataSize += com.google.protobuf.CodedOutputStream
15928              .computeUInt64SizeNoTag(deletedINode_.get(i));
15929          }
15930          size += dataSize;
15931          if (!getDeletedINodeList().isEmpty()) {
15932            size += 1;
15933            size += com.google.protobuf.CodedOutputStream
15934                .computeInt32SizeNoTag(dataSize);
15935          }
15936          deletedINodeMemoizedSerializedSize = dataSize;
15937        }
15938        {
15939          int dataSize = 0;
15940          for (int i = 0; i < deletedINodeRef_.size(); i++) {
15941            dataSize += com.google.protobuf.CodedOutputStream
15942              .computeUInt32SizeNoTag(deletedINodeRef_.get(i));
15943          }
15944          size += dataSize;
15945          if (!getDeletedINodeRefList().isEmpty()) {
15946            size += 1;
15947            size += com.google.protobuf.CodedOutputStream
15948                .computeInt32SizeNoTag(dataSize);
15949          }
15950          deletedINodeRefMemoizedSerializedSize = dataSize;
15951        }
15952        size += getUnknownFields().getSerializedSize();
15953        memoizedSerializedSize = size;
15954        return size;
15955      }
15956
15957      private static final long serialVersionUID = 0L;
15958      @java.lang.Override
15959      protected java.lang.Object writeReplace()
15960          throws java.io.ObjectStreamException {
15961        return super.writeReplace();
15962      }
15963
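      // Every parseFrom/parseDelimitedFrom overload below delegates to the static
      // PARSER; the delimited variants expect a varint length prefix before the
      // message bytes, as produced by writeDelimitedTo().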
15964      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DirectoryDiff parseFrom(
15965          com.google.protobuf.ByteString data)
15966          throws com.google.protobuf.InvalidProtocolBufferException {
15967        return PARSER.parseFrom(data);
15968      }
15969      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DirectoryDiff parseFrom(
15970          com.google.protobuf.ByteString data,
15971          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
15972          throws com.google.protobuf.InvalidProtocolBufferException {
15973        return PARSER.parseFrom(data, extensionRegistry);
15974      }
15975      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DirectoryDiff parseFrom(byte[] data)
15976          throws com.google.protobuf.InvalidProtocolBufferException {
15977        return PARSER.parseFrom(data);
15978      }
15979      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DirectoryDiff parseFrom(
15980          byte[] data,
15981          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
15982          throws com.google.protobuf.InvalidProtocolBufferException {
15983        return PARSER.parseFrom(data, extensionRegistry);
15984      }
15985      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DirectoryDiff parseFrom(java.io.InputStream input)
15986          throws java.io.IOException {
15987        return PARSER.parseFrom(input);
15988      }
15989      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DirectoryDiff parseFrom(
15990          java.io.InputStream input,
15991          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
15992          throws java.io.IOException {
15993        return PARSER.parseFrom(input, extensionRegistry);
15994      }
15995      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DirectoryDiff parseDelimitedFrom(java.io.InputStream input)
15996          throws java.io.IOException {
15997        return PARSER.parseDelimitedFrom(input);
15998      }
15999      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DirectoryDiff parseDelimitedFrom(
16000          java.io.InputStream input,
16001          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
16002          throws java.io.IOException {
16003        return PARSER.parseDelimitedFrom(input, extensionRegistry);
16004      }
16005      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DirectoryDiff parseFrom(
16006          com.google.protobuf.CodedInputStream input)
16007          throws java.io.IOException {
16008        return PARSER.parseFrom(input);
16009      }
16010      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DirectoryDiff parseFrom(
16011          com.google.protobuf.CodedInputStream input,
16012          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
16013          throws java.io.IOException {
16014        return PARSER.parseFrom(input, extensionRegistry);
16015      }
16016
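      // A minimal builder usage sketch (illustrative only; the field values here
      // are invented, not taken from any real fsimage):
      //
      //   DirectoryDiff diff = DirectoryDiff.newBuilder()
      //       .setSnapshotId(1)
      //       .setChildrenSize(2)
      //       .setName(com.google.protobuf.ByteString.copyFromUtf8("dir"))
      //       .addDeletedINode(16386L)
      //       .build();
      //   byte[] bytes = diff.toByteArray();
      //   DirectoryDiff parsed = DirectoryDiff.parseFrom(bytes);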
16017      public static Builder newBuilder() { return Builder.create(); }
16018      public Builder newBuilderForType() { return newBuilder(); }
16019      public static Builder newBuilder(org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DirectoryDiff prototype) {
16020        return newBuilder().mergeFrom(prototype);
16021      }
16022      public Builder toBuilder() { return newBuilder(this); }
16023
16024      @java.lang.Override
16025      protected Builder newBuilderForType(
16026          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
16027        Builder builder = new Builder(parent);
16028        return builder;
16029      }
16030      /**
16031       * Protobuf type {@code hadoop.hdfs.fsimage.SnapshotDiffSection.DirectoryDiff}
16032       */
16033      public static final class Builder extends
16034          com.google.protobuf.GeneratedMessage.Builder<Builder>
16035         implements org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DirectoryDiffOrBuilder {
16036        public static final com.google.protobuf.Descriptors.Descriptor
16037            getDescriptor() {
16038          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_DirectoryDiff_descriptor;
16039        }
16040
16041        protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
16042            internalGetFieldAccessorTable() {
16043          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_DirectoryDiff_fieldAccessorTable
16044              .ensureFieldAccessorsInitialized(
16045                  org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DirectoryDiff.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DirectoryDiff.Builder.class);
16046        }
16047
16048        // Construct using org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DirectoryDiff.newBuilder()
16049        private Builder() {
16050          maybeForceBuilderInitialization();
16051        }
16052
16053        private Builder(
16054            com.google.protobuf.GeneratedMessage.BuilderParent parent) {
16055          super(parent);
16056          maybeForceBuilderInitialization();
16057        }
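        // alwaysUseFieldBuilders is a test-only switch in GeneratedMessage and is
        // false in normal use, so the nested snapshotCopy builder is created
        // lazily on first access via getSnapshotCopyFieldBuilder().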
16058        private void maybeForceBuilderInitialization() {
16059          if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
16060            getSnapshotCopyFieldBuilder();
16061          }
16062        }
16063        private static Builder create() {
16064          return new Builder();
16065        }
16066
16067        public Builder clear() {
16068          super.clear();
16069          snapshotId_ = 0;
16070          bitField0_ = (bitField0_ & ~0x00000001);
16071          childrenSize_ = 0;
16072          bitField0_ = (bitField0_ & ~0x00000002);
16073          isSnapshotRoot_ = false;
16074          bitField0_ = (bitField0_ & ~0x00000004);
16075          name_ = com.google.protobuf.ByteString.EMPTY;
16076          bitField0_ = (bitField0_ & ~0x00000008);
16077          if (snapshotCopyBuilder_ == null) {
16078            snapshotCopy_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory.getDefaultInstance();
16079          } else {
16080            snapshotCopyBuilder_.clear();
16081          }
16082          bitField0_ = (bitField0_ & ~0x00000010);
16083          createdListSize_ = 0;
16084          bitField0_ = (bitField0_ & ~0x00000020);
16085          deletedINode_ = java.util.Collections.emptyList();
16086          bitField0_ = (bitField0_ & ~0x00000040);
16087          deletedINodeRef_ = java.util.Collections.emptyList();
16088          bitField0_ = (bitField0_ & ~0x00000080);
16089          return this;
16090        }
16091
16092        public Builder clone() {
16093          return create().mergeFrom(buildPartial());
16094        }
16095
16096        public com.google.protobuf.Descriptors.Descriptor
16097            getDescriptorForType() {
16098          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_DirectoryDiff_descriptor;
16099        }
16100
16101        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DirectoryDiff getDefaultInstanceForType() {
16102          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DirectoryDiff.getDefaultInstance();
16103        }
16104
16105        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DirectoryDiff build() {
16106          org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DirectoryDiff result = buildPartial();
16107          if (!result.isInitialized()) {
16108            throw newUninitializedMessageException(result);
16109          }
16110          return result;
16111        }
16112
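        // buildPartial() copies builder state into a message without enforcing
        // initialization: presence bits are translated from the builder's
        // bitField0_ into the message's, and the repeated lists are frozen with
        // Collections.unmodifiableList so the built message stays immutable.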
16113        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DirectoryDiff buildPartial() {
16114          org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DirectoryDiff result = new org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DirectoryDiff(this);
16115          int from_bitField0_ = bitField0_;
16116          int to_bitField0_ = 0;
16117          if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
16118            to_bitField0_ |= 0x00000001;
16119          }
16120          result.snapshotId_ = snapshotId_;
16121          if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
16122            to_bitField0_ |= 0x00000002;
16123          }
16124          result.childrenSize_ = childrenSize_;
16125          if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
16126            to_bitField0_ |= 0x00000004;
16127          }
16128          result.isSnapshotRoot_ = isSnapshotRoot_;
16129          if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
16130            to_bitField0_ |= 0x00000008;
16131          }
16132          result.name_ = name_;
16133          if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
16134            to_bitField0_ |= 0x00000010;
16135          }
16136          if (snapshotCopyBuilder_ == null) {
16137            result.snapshotCopy_ = snapshotCopy_;
16138          } else {
16139            result.snapshotCopy_ = snapshotCopyBuilder_.build();
16140          }
16141          if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
16142            to_bitField0_ |= 0x00000020;
16143          }
16144          result.createdListSize_ = createdListSize_;
16145          if (((bitField0_ & 0x00000040) == 0x00000040)) {
16146            deletedINode_ = java.util.Collections.unmodifiableList(deletedINode_);
16147            bitField0_ = (bitField0_ & ~0x00000040);
16148          }
16149          result.deletedINode_ = deletedINode_;
16150          if (((bitField0_ & 0x00000080) == 0x00000080)) {
16151            deletedINodeRef_ = java.util.Collections.unmodifiableList(deletedINodeRef_);
16152            bitField0_ = (bitField0_ & ~0x00000080);
16153          }
16154          result.deletedINodeRef_ = deletedINodeRef_;
16155          result.bitField0_ = to_bitField0_;
16156          onBuilt();
16157          return result;
16158        }
16159
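        // Merging follows the usual proto2 rules: set scalar fields from the other
        // message overwrite this builder's values, the nested snapshotCopy message
        // is merged field by field, repeated lists are concatenated, and unknown
        // fields are carried over.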
16160        public Builder mergeFrom(com.google.protobuf.Message other) {
16161          if (other instanceof org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DirectoryDiff) {
16162            return mergeFrom((org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DirectoryDiff)other);
16163          } else {
16164            super.mergeFrom(other);
16165            return this;
16166          }
16167        }
16168
16169        public Builder mergeFrom(org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DirectoryDiff other) {
16170          if (other == org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DirectoryDiff.getDefaultInstance()) return this;
16171          if (other.hasSnapshotId()) {
16172            setSnapshotId(other.getSnapshotId());
16173          }
16174          if (other.hasChildrenSize()) {
16175            setChildrenSize(other.getChildrenSize());
16176          }
16177          if (other.hasIsSnapshotRoot()) {
16178            setIsSnapshotRoot(other.getIsSnapshotRoot());
16179          }
16180          if (other.hasName()) {
16181            setName(other.getName());
16182          }
16183          if (other.hasSnapshotCopy()) {
16184            mergeSnapshotCopy(other.getSnapshotCopy());
16185          }
16186          if (other.hasCreatedListSize()) {
16187            setCreatedListSize(other.getCreatedListSize());
16188          }
16189          if (!other.deletedINode_.isEmpty()) {
16190            if (deletedINode_.isEmpty()) {
16191              deletedINode_ = other.deletedINode_;
16192              bitField0_ = (bitField0_ & ~0x00000040);
16193            } else {
16194              ensureDeletedINodeIsMutable();
16195              deletedINode_.addAll(other.deletedINode_);
16196            }
16197            onChanged();
16198          }
16199          if (!other.deletedINodeRef_.isEmpty()) {
16200            if (deletedINodeRef_.isEmpty()) {
16201              deletedINodeRef_ = other.deletedINodeRef_;
16202              bitField0_ = (bitField0_ & ~0x00000080);
16203            } else {
16204              ensureDeletedINodeRefIsMutable();
16205              deletedINodeRef_.addAll(other.deletedINodeRef_);
16206            }
16207            onChanged();
16208          }
16209          this.mergeUnknownFields(other.getUnknownFields());
16210          return this;
16211        }
16212
16213        public final boolean isInitialized() {
16214          if (hasSnapshotCopy()) {
16215            if (!getSnapshotCopy().isInitialized()) {
16216              
16217              return false;
16218            }
16219          }
16220          return true;
16221        }
16222
16223        public Builder mergeFrom(
16224            com.google.protobuf.CodedInputStream input,
16225            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
16226            throws java.io.IOException {
16227          org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DirectoryDiff parsedMessage = null;
16228          try {
16229            parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
16230          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
16231            parsedMessage = (org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DirectoryDiff) e.getUnfinishedMessage();
16232            throw e;
16233          } finally {
16234            if (parsedMessage != null) {
16235              mergeFrom(parsedMessage);
16236            }
16237          }
16238          return this;
16239        }
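        // bitField0_ tracks presence: bit 0 snapshotId, bit 1 childrenSize,
        // bit 2 isSnapshotRoot, bit 3 name, bit 4 snapshotCopy,
        // bit 5 createdListSize; bits 6 and 7 mark the deletedINode and
        // deletedINodeRef lists as locally mutable.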
16240        private int bitField0_;
16241
16242        // optional uint32 snapshotId = 1;
16243        private int snapshotId_ ;
16244        /**
16245         * <code>optional uint32 snapshotId = 1;</code>
16246         */
16247        public boolean hasSnapshotId() {
16248          return ((bitField0_ & 0x00000001) == 0x00000001);
16249        }
16250        /**
16251         * <code>optional uint32 snapshotId = 1;</code>
16252         */
16253        public int getSnapshotId() {
16254          return snapshotId_;
16255        }
16256        /**
16257         * <code>optional uint32 snapshotId = 1;</code>
16258         */
16259        public Builder setSnapshotId(int value) {
16260          bitField0_ |= 0x00000001;
16261          snapshotId_ = value;
16262          onChanged();
16263          return this;
16264        }
16265        /**
16266         * <code>optional uint32 snapshotId = 1;</code>
16267         */
16268        public Builder clearSnapshotId() {
16269          bitField0_ = (bitField0_ & ~0x00000001);
16270          snapshotId_ = 0;
16271          onChanged();
16272          return this;
16273        }
16274
16275        // optional uint32 childrenSize = 2;
16276        private int childrenSize_ ;
16277        /**
16278         * <code>optional uint32 childrenSize = 2;</code>
16279         */
16280        public boolean hasChildrenSize() {
16281          return ((bitField0_ & 0x00000002) == 0x00000002);
16282        }
16283        /**
16284         * <code>optional uint32 childrenSize = 2;</code>
16285         */
16286        public int getChildrenSize() {
16287          return childrenSize_;
16288        }
16289        /**
16290         * <code>optional uint32 childrenSize = 2;</code>
16291         */
16292        public Builder setChildrenSize(int value) {
16293          bitField0_ |= 0x00000002;
16294          childrenSize_ = value;
16295          onChanged();
16296          return this;
16297        }
16298        /**
16299         * <code>optional uint32 childrenSize = 2;</code>
16300         */
16301        public Builder clearChildrenSize() {
16302          bitField0_ = (bitField0_ & ~0x00000002);
16303          childrenSize_ = 0;
16304          onChanged();
16305          return this;
16306        }
16307
16308        // optional bool isSnapshotRoot = 3;
16309        private boolean isSnapshotRoot_ ;
16310        /**
16311         * <code>optional bool isSnapshotRoot = 3;</code>
16312         */
16313        public boolean hasIsSnapshotRoot() {
16314          return ((bitField0_ & 0x00000004) == 0x00000004);
16315        }
16316        /**
16317         * <code>optional bool isSnapshotRoot = 3;</code>
16318         */
16319        public boolean getIsSnapshotRoot() {
16320          return isSnapshotRoot_;
16321        }
16322        /**
16323         * <code>optional bool isSnapshotRoot = 3;</code>
16324         */
16325        public Builder setIsSnapshotRoot(boolean value) {
16326          bitField0_ |= 0x00000004;
16327          isSnapshotRoot_ = value;
16328          onChanged();
16329          return this;
16330        }
16331        /**
16332         * <code>optional bool isSnapshotRoot = 3;</code>
16333         */
16334        public Builder clearIsSnapshotRoot() {
16335          bitField0_ = (bitField0_ & ~0x00000004);
16336          isSnapshotRoot_ = false;
16337          onChanged();
16338          return this;
16339        }
16340
16341        // optional bytes name = 4;
16342        private com.google.protobuf.ByteString name_ = com.google.protobuf.ByteString.EMPTY;
16343        /**
16344         * <code>optional bytes name = 4;</code>
16345         */
16346        public boolean hasName() {
16347          return ((bitField0_ & 0x00000008) == 0x00000008);
16348        }
16349        /**
16350         * <code>optional bytes name = 4;</code>
16351         */
16352        public com.google.protobuf.ByteString getName() {
16353          return name_;
16354        }
16355        /**
16356         * <code>optional bytes name = 4;</code>
16357         */
16358        public Builder setName(com.google.protobuf.ByteString value) {
16359          if (value == null) {
16360            throw new NullPointerException();
16361          }
16362          bitField0_ |= 0x00000008;
16363          name_ = value;
16364          onChanged();
16365          return this;
16366        }
16367        /**
16368         * <code>optional bytes name = 4;</code>
16369         */
16370        public Builder clearName() {
16371          bitField0_ = (bitField0_ & ~0x00000008);
16372          name_ = getDefaultInstance().getName();
16373          onChanged();
16374          return this;
16375        }
16376
16377        // optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory snapshotCopy = 5;
16378        private org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory snapshotCopy_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory.getDefaultInstance();
16379        private com.google.protobuf.SingleFieldBuilder<
16380            org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectoryOrBuilder> snapshotCopyBuilder_;
16381        /**
16382         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory snapshotCopy = 5;</code>
16383         */
16384        public boolean hasSnapshotCopy() {
16385          return ((bitField0_ & 0x00000010) == 0x00000010);
16386        }
16387        /**
16388         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory snapshotCopy = 5;</code>
16389         */
16390        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory getSnapshotCopy() {
16391          if (snapshotCopyBuilder_ == null) {
16392            return snapshotCopy_;
16393          } else {
16394            return snapshotCopyBuilder_.getMessage();
16395          }
16396        }
16397        /**
16398         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory snapshotCopy = 5;</code>
16399         */
16400        public Builder setSnapshotCopy(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory value) {
16401          if (snapshotCopyBuilder_ == null) {
16402            if (value == null) {
16403              throw new NullPointerException();
16404            }
16405            snapshotCopy_ = value;
16406            onChanged();
16407          } else {
16408            snapshotCopyBuilder_.setMessage(value);
16409          }
16410          bitField0_ |= 0x00000010;
16411          return this;
16412        }
16413        /**
16414         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory snapshotCopy = 5;</code>
16415         */
16416        public Builder setSnapshotCopy(
16417            org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory.Builder builderForValue) {
16418          if (snapshotCopyBuilder_ == null) {
16419            snapshotCopy_ = builderForValue.build();
16420            onChanged();
16421          } else {
16422            snapshotCopyBuilder_.setMessage(builderForValue.build());
16423          }
16424          bitField0_ |= 0x00000010;
16425          return this;
16426        }
16427        /**
16428         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory snapshotCopy = 5;</code>
16429         */
16430        public Builder mergeSnapshotCopy(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory value) {
16431          if (snapshotCopyBuilder_ == null) {
16432            if (((bitField0_ & 0x00000010) == 0x00000010) &&
16433                snapshotCopy_ != org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory.getDefaultInstance()) {
16434              snapshotCopy_ =
16435                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory.newBuilder(snapshotCopy_).mergeFrom(value).buildPartial();
16436            } else {
16437              snapshotCopy_ = value;
16438            }
16439            onChanged();
16440          } else {
16441            snapshotCopyBuilder_.mergeFrom(value);
16442          }
16443          bitField0_ |= 0x00000010;
16444          return this;
16445        }
16446        /**
16447         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory snapshotCopy = 5;</code>
16448         */
16449        public Builder clearSnapshotCopy() {
16450          if (snapshotCopyBuilder_ == null) {
16451            snapshotCopy_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory.getDefaultInstance();
16452            onChanged();
16453          } else {
16454            snapshotCopyBuilder_.clear();
16455          }
16456          bitField0_ = (bitField0_ & ~0x00000010);
16457          return this;
16458        }
16459        /**
16460         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory snapshotCopy = 5;</code>
16461         */
16462        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory.Builder getSnapshotCopyBuilder() {
16463          bitField0_ |= 0x00000010;
16464          onChanged();
16465          return getSnapshotCopyFieldBuilder().getBuilder();
16466        }
16467        /**
16468         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory snapshotCopy = 5;</code>
16469         */
16470        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectoryOrBuilder getSnapshotCopyOrBuilder() {
16471          if (snapshotCopyBuilder_ != null) {
16472            return snapshotCopyBuilder_.getMessageOrBuilder();
16473          } else {
16474            return snapshotCopy_;
16475          }
16476        }
16477        /**
16478         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeDirectory snapshotCopy = 5;</code>
16479         */
16480        private com.google.protobuf.SingleFieldBuilder<
16481            org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectoryOrBuilder> 
16482            getSnapshotCopyFieldBuilder() {
16483          if (snapshotCopyBuilder_ == null) {
16484            snapshotCopyBuilder_ = new com.google.protobuf.SingleFieldBuilder<
16485                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectoryOrBuilder>(
16486                    snapshotCopy_,
16487                    getParentForChildren(),
16488                    isClean());
16489            snapshotCopy_ = null;
16490          }
16491          return snapshotCopyBuilder_;
16492        }
16493
16494        // optional uint32 createdListSize = 6;
16495        private int createdListSize_ ;
16496        /**
16497         * <code>optional uint32 createdListSize = 6;</code>
16498         */
16499        public boolean hasCreatedListSize() {
16500          return ((bitField0_ & 0x00000020) == 0x00000020);
16501        }
16502        /**
16503         * <code>optional uint32 createdListSize = 6;</code>
16504         */
16505        public int getCreatedListSize() {
16506          return createdListSize_;
16507        }
16508        /**
16509         * <code>optional uint32 createdListSize = 6;</code>
16510         */
16511        public Builder setCreatedListSize(int value) {
16512          bitField0_ |= 0x00000020;
16513          createdListSize_ = value;
16514          onChanged();
16515          return this;
16516        }
16517        /**
16518         * <code>optional uint32 createdListSize = 6;</code>
16519         */
16520        public Builder clearCreatedListSize() {
16521          bitField0_ = (bitField0_ & ~0x00000020);
16522          createdListSize_ = 0;
16523          onChanged();
16524          return this;
16525        }
16526
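        // The deleted-inode list is copy-on-write: it starts as the shared
        // immutable empty list, and ensureDeletedINodeIsMutable() copies it into
        // a private ArrayList on first mutation, so a list adopted directly from
        // another message in mergeFrom() is never modified in place.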
16527        // repeated uint64 deletedINode = 7 [packed = true];
16528        private java.util.List<java.lang.Long> deletedINode_ = java.util.Collections.emptyList();
16529        private void ensureDeletedINodeIsMutable() {
16530          if (!((bitField0_ & 0x00000040) == 0x00000040)) {
16531            deletedINode_ = new java.util.ArrayList<java.lang.Long>(deletedINode_);
16532            bitField0_ |= 0x00000040;
16533          }
16534        }
16535        /**
16536         * <code>repeated uint64 deletedINode = 7 [packed = true];</code>
16537         *
16538         * <pre>
16539         * id of deleted inodes
16540         * </pre>
16541         */
16542        public java.util.List<java.lang.Long>
16543            getDeletedINodeList() {
16544          return java.util.Collections.unmodifiableList(deletedINode_);
16545        }
16546        /**
16547         * <code>repeated uint64 deletedINode = 7 [packed = true];</code>
16548         *
16549         * <pre>
16550         * id of deleted inodes
16551         * </pre>
16552         */
16553        public int getDeletedINodeCount() {
16554          return deletedINode_.size();
16555        }
16556        /**
16557         * <code>repeated uint64 deletedINode = 7 [packed = true];</code>
16558         *
16559         * <pre>
16560         * id of deleted inodes
16561         * </pre>
16562         */
16563        public long getDeletedINode(int index) {
16564          return deletedINode_.get(index);
16565        }
16566        /**
16567         * <code>repeated uint64 deletedINode = 7 [packed = true];</code>
16568         *
16569         * <pre>
16570         * id of deleted inodes
16571         * </pre>
16572         */
16573        public Builder setDeletedINode(
16574            int index, long value) {
16575          ensureDeletedINodeIsMutable();
16576          deletedINode_.set(index, value);
16577          onChanged();
16578          return this;
16579        }
16580        /**
16581         * <code>repeated uint64 deletedINode = 7 [packed = true];</code>
16582         *
16583         * <pre>
16584         * id of deleted inodes
16585         * </pre>
16586         */
16587        public Builder addDeletedINode(long value) {
16588          ensureDeletedINodeIsMutable();
16589          deletedINode_.add(value);
16590          onChanged();
16591          return this;
16592        }
16593        /**
16594         * <code>repeated uint64 deletedINode = 7 [packed = true];</code>
16595         *
16596         * <pre>
16597         * id of deleted inodes
16598         * </pre>
16599         */
16600        public Builder addAllDeletedINode(
16601            java.lang.Iterable<? extends java.lang.Long> values) {
16602          ensureDeletedINodeIsMutable();
16603          super.addAll(values, deletedINode_);
16604          onChanged();
16605          return this;
16606        }
16607        /**
16608         * <code>repeated uint64 deletedINode = 7 [packed = true];</code>
16609         *
16610         * <pre>
16611         * id of deleted inodes
16612         * </pre>
16613         */
16614        public Builder clearDeletedINode() {
16615          deletedINode_ = java.util.Collections.emptyList();
16616          bitField0_ = (bitField0_ & ~0x00000040);
16617          onChanged();
16618          return this;
16619        }
16620
16621        // repeated uint32 deletedINodeRef = 8 [packed = true];
16622        private java.util.List<java.lang.Integer> deletedINodeRef_ = java.util.Collections.emptyList();
16623        private void ensureDeletedINodeRefIsMutable() {
16624          if (!((bitField0_ & 0x00000080) == 0x00000080)) {
16625            deletedINodeRef_ = new java.util.ArrayList<java.lang.Integer>(deletedINodeRef_);
16626            bitField0_ |= 0x00000080;
16627          }
16628        }
16629        /**
16630         * <code>repeated uint32 deletedINodeRef = 8 [packed = true];</code>
16631         *
16632         * <pre>
16633         * id of reference nodes in the deleted list
16634         * </pre>
16635         */
16636        public java.util.List<java.lang.Integer>
16637            getDeletedINodeRefList() {
16638          return java.util.Collections.unmodifiableList(deletedINodeRef_);
16639        }
16640        /**
16641         * <code>repeated uint32 deletedINodeRef = 8 [packed = true];</code>
16642         *
16643         * <pre>
16644         * id of reference nodes in the deleted list
16645         * </pre>
16646         */
16647        public int getDeletedINodeRefCount() {
16648          return deletedINodeRef_.size();
16649        }
16650        /**
16651         * <code>repeated uint32 deletedINodeRef = 8 [packed = true];</code>
16652         *
16653         * <pre>
16654         * id of reference nodes in the deleted list
16655         * </pre>
16656         */
16657        public int getDeletedINodeRef(int index) {
16658          return deletedINodeRef_.get(index);
16659        }
16660        /**
16661         * <code>repeated uint32 deletedINodeRef = 8 [packed = true];</code>
16662         *
16663         * <pre>
16664         * id of reference nodes in the deleted list
16665         * </pre>
16666         */
16667        public Builder setDeletedINodeRef(
16668            int index, int value) {
16669          ensureDeletedINodeRefIsMutable();
16670          deletedINodeRef_.set(index, value);
16671          onChanged();
16672          return this;
16673        }
16674        /**
16675         * <code>repeated uint32 deletedINodeRef = 8 [packed = true];</code>
16676         *
16677         * <pre>
16678         * id of reference nodes in the deleted list
16679         * </pre>
16680         */
16681        public Builder addDeletedINodeRef(int value) {
16682          ensureDeletedINodeRefIsMutable();
16683          deletedINodeRef_.add(value);
16684          onChanged();
16685          return this;
16686        }
16687        /**
16688         * <code>repeated uint32 deletedINodeRef = 8 [packed = true];</code>
16689         *
16690         * <pre>
16691         * id of reference nodes in the deleted list
16692         * </pre>
16693         */
16694        public Builder addAllDeletedINodeRef(
16695            java.lang.Iterable<? extends java.lang.Integer> values) {
16696          ensureDeletedINodeRefIsMutable();
16697          super.addAll(values, deletedINodeRef_);
16698          onChanged();
16699          return this;
16700        }
16701        /**
16702         * <code>repeated uint32 deletedINodeRef = 8 [packed = true];</code>
16703         *
16704         * <pre>
16705         * id of reference nodes in the deleted list
16706         * </pre>
16707         */
16708        public Builder clearDeletedINodeRef() {
16709          deletedINodeRef_ = java.util.Collections.emptyList();
16710          bitField0_ = (bitField0_ & ~0x00000080);
16711          onChanged();
16712          return this;
16713        }
16714
16715        // @@protoc_insertion_point(builder_scope:hadoop.hdfs.fsimage.SnapshotDiffSection.DirectoryDiff)
16716      }
16717
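      // The shared default instance is built eagerly when the class is loaded;
      // initFields() assigns every field its proto2 default value.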
16718      static {
16719        defaultInstance = new DirectoryDiff(true);
16720        defaultInstance.initFields();
16721      }
16722
16723      // @@protoc_insertion_point(class_scope:hadoop.hdfs.fsimage.SnapshotDiffSection.DirectoryDiff)
16724    }
16725
16726    public interface FileDiffOrBuilder
16727        extends com.google.protobuf.MessageOrBuilder {
16728
16729      // optional uint32 snapshotId = 1;
16730      /**
16731       * <code>optional uint32 snapshotId = 1;</code>
16732       */
16733      boolean hasSnapshotId();
16734      /**
16735       * <code>optional uint32 snapshotId = 1;</code>
16736       */
16737      int getSnapshotId();
16738
16739      // optional uint64 fileSize = 2;
16740      /**
16741       * <code>optional uint64 fileSize = 2;</code>
16742       */
16743      boolean hasFileSize();
16744      /**
16745       * <code>optional uint64 fileSize = 2;</code>
16746       */
16747      long getFileSize();
16748
16749      // optional bytes name = 3;
16750      /**
16751       * <code>optional bytes name = 3;</code>
16752       */
16753      boolean hasName();
16754      /**
16755       * <code>optional bytes name = 3;</code>
16756       */
16757      com.google.protobuf.ByteString getName();
16758
16759      // optional .hadoop.hdfs.fsimage.INodeSection.INodeFile snapshotCopy = 4;
16760      /**
16761       * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeFile snapshotCopy = 4;</code>
16762       */
16763      boolean hasSnapshotCopy();
16764      /**
16765       * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeFile snapshotCopy = 4;</code>
16766       */
16767      org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile getSnapshotCopy();
16768      /**
16769       * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeFile snapshotCopy = 4;</code>
16770       */
16771      org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFileOrBuilder getSnapshotCopyOrBuilder();
16772    }
16773    /**
16774     * Protobuf type {@code hadoop.hdfs.fsimage.SnapshotDiffSection.FileDiff}
16775     */
16776    public static final class FileDiff extends
16777        com.google.protobuf.GeneratedMessage
16778        implements FileDiffOrBuilder {
16779      // Use FileDiff.newBuilder() to construct.
16780      private FileDiff(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
16781        super(builder);
16782        this.unknownFields = builder.getUnknownFields();
16783      }
16784      private FileDiff(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
16785
16786      private static final FileDiff defaultInstance;
16787      public static FileDiff getDefaultInstance() {
16788        return defaultInstance;
16789      }
16790
16791      public FileDiff getDefaultInstanceForType() {
16792        return defaultInstance;
16793      }
16794
16795      private final com.google.protobuf.UnknownFieldSet unknownFields;
16796      @java.lang.Override
16797      public final com.google.protobuf.UnknownFieldSet
16798          getUnknownFields() {
16799        return this.unknownFields;
16800      }
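      // Parsing constructor: consumes the stream tag by tag. Each tag is
      // (fieldNumber << 3) | wireType, so 8 = field 1 varint, 16 = field 2 varint,
      // 26 = field 3 length-delimited bytes, and 34 = field 4 embedded message.
      // The default branch precedes the field cases, which is legal in a Java
      // switch; unrecognized tags are preserved in unknownFields.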
16801      private FileDiff(
16802          com.google.protobuf.CodedInputStream input,
16803          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
16804          throws com.google.protobuf.InvalidProtocolBufferException {
16805        initFields();
16806        int mutable_bitField0_ = 0;
16807        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
16808            com.google.protobuf.UnknownFieldSet.newBuilder();
16809        try {
16810          boolean done = false;
16811          while (!done) {
16812            int tag = input.readTag();
16813            switch (tag) {
16814              case 0:
16815                done = true;
16816                break;
16817              default: {
16818                if (!parseUnknownField(input, unknownFields,
16819                                       extensionRegistry, tag)) {
16820                  done = true;
16821                }
16822                break;
16823              }
16824              case 8: {
16825                bitField0_ |= 0x00000001;
16826                snapshotId_ = input.readUInt32();
16827                break;
16828              }
16829              case 16: {
16830                bitField0_ |= 0x00000002;
16831                fileSize_ = input.readUInt64();
16832                break;
16833              }
16834              case 26: {
16835                bitField0_ |= 0x00000004;
16836                name_ = input.readBytes();
16837                break;
16838              }
16839              case 34: {
16840                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile.Builder subBuilder = null;
16841                if (((bitField0_ & 0x00000008) == 0x00000008)) {
16842                  subBuilder = snapshotCopy_.toBuilder();
16843                }
16844                snapshotCopy_ = input.readMessage(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile.PARSER, extensionRegistry);
16845                if (subBuilder != null) {
16846                  subBuilder.mergeFrom(snapshotCopy_);
16847                  snapshotCopy_ = subBuilder.buildPartial();
16848                }
16849                bitField0_ |= 0x00000008;
16850                break;
16851              }
16852            }
16853          }
16854        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
16855          throw e.setUnfinishedMessage(this);
16856        } catch (java.io.IOException e) {
16857          throw new com.google.protobuf.InvalidProtocolBufferException(
16858              e.getMessage()).setUnfinishedMessage(this);
16859        } finally {
16860          this.unknownFields = unknownFields.build();
16861          makeExtensionsImmutable();
16862        }
16863      }
16864      public static final com.google.protobuf.Descriptors.Descriptor
16865          getDescriptor() {
16866        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_FileDiff_descriptor;
16867      }
16868
16869      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
16870          internalGetFieldAccessorTable() {
16871        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_FileDiff_fieldAccessorTable
16872            .ensureFieldAccessorsInitialized(
16873                org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.FileDiff.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.FileDiff.Builder.class);
16874      }
16875
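      // PARSER is the single stream-parsing entry point behind every parseFrom
      // overload; parsePartialFrom skips the initialization check that
      // PARSER.parseFrom() applies afterwards.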
16876      public static com.google.protobuf.Parser<FileDiff> PARSER =
16877          new com.google.protobuf.AbstractParser<FileDiff>() {
16878        public FileDiff parsePartialFrom(
16879            com.google.protobuf.CodedInputStream input,
16880            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
16881            throws com.google.protobuf.InvalidProtocolBufferException {
16882          return new FileDiff(input, extensionRegistry);
16883        }
16884      };
16885
16886      @java.lang.Override
16887      public com.google.protobuf.Parser<FileDiff> getParserForType() {
16888        return PARSER;
16889      }
16890
16891      private int bitField0_;
16892      // optional uint32 snapshotId = 1;
16893      public static final int SNAPSHOTID_FIELD_NUMBER = 1;
16894      private int snapshotId_;
16895      /**
16896       * <code>optional uint32 snapshotId = 1;</code>
16897       */
16898      public boolean hasSnapshotId() {
16899        return ((bitField0_ & 0x00000001) == 0x00000001);
16900      }
16901      /**
16902       * <code>optional uint32 snapshotId = 1;</code>
16903       */
16904      public int getSnapshotId() {
16905        return snapshotId_;
16906      }
16907
16908      // optional uint64 fileSize = 2;
16909      public static final int FILESIZE_FIELD_NUMBER = 2;
16910      private long fileSize_;
16911      /**
16912       * <code>optional uint64 fileSize = 2;</code>
16913       */
16914      public boolean hasFileSize() {
16915        return ((bitField0_ & 0x00000002) == 0x00000002);
16916      }
16917      /**
16918       * <code>optional uint64 fileSize = 2;</code>
16919       */
16920      public long getFileSize() {
16921        return fileSize_;
16922      }
16923
16924      // optional bytes name = 3;
16925      public static final int NAME_FIELD_NUMBER = 3;
16926      private com.google.protobuf.ByteString name_;
16927      /**
16928       * <code>optional bytes name = 3;</code>
16929       */
16930      public boolean hasName() {
16931        return ((bitField0_ & 0x00000004) == 0x00000004);
16932      }
16933      /**
16934       * <code>optional bytes name = 3;</code>
16935       */
16936      public com.google.protobuf.ByteString getName() {
16937        return name_;
16938      }
16939
16940      // optional .hadoop.hdfs.fsimage.INodeSection.INodeFile snapshotCopy = 4;
16941      public static final int SNAPSHOTCOPY_FIELD_NUMBER = 4;
16942      private org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile snapshotCopy_;
16943      /**
16944       * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeFile snapshotCopy = 4;</code>
16945       */
16946      public boolean hasSnapshotCopy() {
16947        return ((bitField0_ & 0x00000008) == 0x00000008);
16948      }
16949      /**
16950       * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeFile snapshotCopy = 4;</code>
16951       */
16952      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile getSnapshotCopy() {
16953        return snapshotCopy_;
16954      }
16955      /**
16956       * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeFile snapshotCopy = 4;</code>
16957       */
16958      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFileOrBuilder getSnapshotCopyOrBuilder() {
16959        return snapshotCopy_;
16960      }
16961
16962      private void initFields() {
16963        snapshotId_ = 0;
16964        fileSize_ = 0L;
16965        name_ = com.google.protobuf.ByteString.EMPTY;
16966        snapshotCopy_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile.getDefaultInstance();
16967      }
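      // isInitialized() caches its verdict in memoizedIsInitialized (-1 unknown,
      // 0 false, 1 true). FileDiff declares no required fields itself, so only an
      // uninitialized snapshotCopy submessage can make it uninitialized.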
16968      private byte memoizedIsInitialized = -1;
16969      public final boolean isInitialized() {
16970        byte isInitialized = memoizedIsInitialized;
16971        if (isInitialized != -1) return isInitialized == 1;
16972
16973        if (hasSnapshotCopy()) {
16974          if (!getSnapshotCopy().isInitialized()) {
16975            memoizedIsInitialized = 0;
16976            return false;
16977          }
16978        }
16979        memoizedIsInitialized = 1;
16980        return true;
16981      }
16982
16983      public void writeTo(com.google.protobuf.CodedOutputStream output)
16984                          throws java.io.IOException {
16985        getSerializedSize();
16986        if (((bitField0_ & 0x00000001) == 0x00000001)) {
16987          output.writeUInt32(1, snapshotId_);
16988        }
16989        if (((bitField0_ & 0x00000002) == 0x00000002)) {
16990          output.writeUInt64(2, fileSize_);
16991        }
16992        if (((bitField0_ & 0x00000004) == 0x00000004)) {
16993          output.writeBytes(3, name_);
16994        }
16995        if (((bitField0_ & 0x00000008) == 0x00000008)) {
16996          output.writeMessage(4, snapshotCopy_);
16997        }
16998        getUnknownFields().writeTo(output);
16999      }
17000
17001      private int memoizedSerializedSize = -1;
17002      public int getSerializedSize() {
17003        int size = memoizedSerializedSize;
17004        if (size != -1) return size;
17005
17006        size = 0;
17007        if (((bitField0_ & 0x00000001) == 0x00000001)) {
17008          size += com.google.protobuf.CodedOutputStream
17009            .computeUInt32Size(1, snapshotId_);
17010        }
17011        if (((bitField0_ & 0x00000002) == 0x00000002)) {
17012          size += com.google.protobuf.CodedOutputStream
17013            .computeUInt64Size(2, fileSize_);
17014        }
17015        if (((bitField0_ & 0x00000004) == 0x00000004)) {
17016          size += com.google.protobuf.CodedOutputStream
17017            .computeBytesSize(3, name_);
17018        }
17019        if (((bitField0_ & 0x00000008) == 0x00000008)) {
17020          size += com.google.protobuf.CodedOutputStream
17021            .computeMessageSize(4, snapshotCopy_);
17022        }
17023        size += getUnknownFields().getSerializedSize();
17024        memoizedSerializedSize = size;
17025        return size;
17026      }
17027
17028      private static final long serialVersionUID = 0L;
17029      @java.lang.Override
17030      protected java.lang.Object writeReplace()
17031          throws java.io.ObjectStreamException {
17032        return super.writeReplace();
17033      }
17034
17035      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.FileDiff parseFrom(
17036          com.google.protobuf.ByteString data)
17037          throws com.google.protobuf.InvalidProtocolBufferException {
17038        return PARSER.parseFrom(data);
17039      }
17040      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.FileDiff parseFrom(
17041          com.google.protobuf.ByteString data,
17042          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
17043          throws com.google.protobuf.InvalidProtocolBufferException {
17044        return PARSER.parseFrom(data, extensionRegistry);
17045      }
17046      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.FileDiff parseFrom(byte[] data)
17047          throws com.google.protobuf.InvalidProtocolBufferException {
17048        return PARSER.parseFrom(data);
17049      }
17050      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.FileDiff parseFrom(
17051          byte[] data,
17052          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
17053          throws com.google.protobuf.InvalidProtocolBufferException {
17054        return PARSER.parseFrom(data, extensionRegistry);
17055      }
17056      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.FileDiff parseFrom(java.io.InputStream input)
17057          throws java.io.IOException {
17058        return PARSER.parseFrom(input);
17059      }
17060      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.FileDiff parseFrom(
17061          java.io.InputStream input,
17062          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
17063          throws java.io.IOException {
17064        return PARSER.parseFrom(input, extensionRegistry);
17065      }
17066      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.FileDiff parseDelimitedFrom(java.io.InputStream input)
17067          throws java.io.IOException {
17068        return PARSER.parseDelimitedFrom(input);
17069      }
17070      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.FileDiff parseDelimitedFrom(
17071          java.io.InputStream input,
17072          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
17073          throws java.io.IOException {
17074        return PARSER.parseDelimitedFrom(input, extensionRegistry);
17075      }
17076      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.FileDiff parseFrom(
17077          com.google.protobuf.CodedInputStream input)
17078          throws java.io.IOException {
17079        return PARSER.parseFrom(input);
17080      }
17081      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.FileDiff parseFrom(
17082          com.google.protobuf.CodedInputStream input,
17083          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
17084          throws java.io.IOException {
17085        return PARSER.parseFrom(input, extensionRegistry);
17086      }
17087
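      // A minimal round-trip sketch (illustrative only; the values are invented):
      //
      //   FileDiff fd = FileDiff.newBuilder()
      //       .setSnapshotId(1)
      //       .setFileSize(1024L)
      //       .setName(com.google.protobuf.ByteString.copyFromUtf8("file"))
      //       .build();
      //   FileDiff roundTripped = FileDiff.parseFrom(fd.toByteString());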
17088      public static Builder newBuilder() { return Builder.create(); }
17089      public Builder newBuilderForType() { return newBuilder(); }
17090      public static Builder newBuilder(org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.FileDiff prototype) {
17091        return newBuilder().mergeFrom(prototype);
17092      }
17093      public Builder toBuilder() { return newBuilder(this); }
17094
17095      @java.lang.Override
17096      protected Builder newBuilderForType(
17097          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
17098        Builder builder = new Builder(parent);
17099        return builder;
17100      }
17101      /**
17102       * Protobuf type {@code hadoop.hdfs.fsimage.SnapshotDiffSection.FileDiff}
17103       */
17104      public static final class Builder extends
17105          com.google.protobuf.GeneratedMessage.Builder<Builder>
17106         implements org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.FileDiffOrBuilder {
17107        public static final com.google.protobuf.Descriptors.Descriptor
17108            getDescriptor() {
17109          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_FileDiff_descriptor;
17110        }
17111
17112        protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
17113            internalGetFieldAccessorTable() {
17114          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_FileDiff_fieldAccessorTable
17115              .ensureFieldAccessorsInitialized(
17116                  org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.FileDiff.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.FileDiff.Builder.class);
17117        }
17118
17119        // Construct using org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.FileDiff.newBuilder()
17120        private Builder() {
17121          maybeForceBuilderInitialization();
17122        }
17123
17124        private Builder(
17125            com.google.protobuf.GeneratedMessage.BuilderParent parent) {
17126          super(parent);
17127          maybeForceBuilderInitialization();
17128        }
17129        private void maybeForceBuilderInitialization() {
17130          if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
17131            getSnapshotCopyFieldBuilder();
17132          }
17133        }
17134        private static Builder create() {
17135          return new Builder();
17136        }
17137
17138        public Builder clear() {
17139          super.clear();
17140          snapshotId_ = 0;
17141          bitField0_ = (bitField0_ & ~0x00000001);
17142          fileSize_ = 0L;
17143          bitField0_ = (bitField0_ & ~0x00000002);
17144          name_ = com.google.protobuf.ByteString.EMPTY;
17145          bitField0_ = (bitField0_ & ~0x00000004);
17146          if (snapshotCopyBuilder_ == null) {
17147            snapshotCopy_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile.getDefaultInstance();
17148          } else {
17149            snapshotCopyBuilder_.clear();
17150          }
17151          bitField0_ = (bitField0_ & ~0x00000008);
17152          return this;
17153        }
17154
17155        public Builder clone() {
17156          return create().mergeFrom(buildPartial());
17157        }
17158
17159        public com.google.protobuf.Descriptors.Descriptor
17160            getDescriptorForType() {
17161          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_FileDiff_descriptor;
17162        }
17163
17164        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.FileDiff getDefaultInstanceForType() {
17165          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.FileDiff.getDefaultInstance();
17166        }
17167
17168        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.FileDiff build() {
17169          org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.FileDiff result = buildPartial();
17170          if (!result.isInitialized()) {
17171            throw newUninitializedMessageException(result);
17172          }
17173          return result;
17174        }
17175
17176        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.FileDiff buildPartial() {
17177          org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.FileDiff result = new org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.FileDiff(this);
17178          int from_bitField0_ = bitField0_;
17179          int to_bitField0_ = 0;
17180          if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
17181            to_bitField0_ |= 0x00000001;
17182          }
17183          result.snapshotId_ = snapshotId_;
17184          if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
17185            to_bitField0_ |= 0x00000002;
17186          }
17187          result.fileSize_ = fileSize_;
17188          if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
17189            to_bitField0_ |= 0x00000004;
17190          }
17191          result.name_ = name_;
17192          if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
17193            to_bitField0_ |= 0x00000008;
17194          }
17195          if (snapshotCopyBuilder_ == null) {
17196            result.snapshotCopy_ = snapshotCopy_;
17197          } else {
17198            result.snapshotCopy_ = snapshotCopyBuilder_.build();
17199          }
17200          result.bitField0_ = to_bitField0_;
17201          onBuilt();
17202          return result;
17203        }
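        // Note on build() vs buildPartial() above: build() delegates to
        // buildPartial() and then throws newUninitializedMessageException if
        // isInitialized() fails, while buildPartial() never validates.
        // Illustrative sketch (hypothetical values, not generated output):
        //
        //   FileDiff.Builder b = FileDiff.newBuilder().setSnapshotId(1);
        //   FileDiff partial = b.buildPartial(); // no initialization check
        //   FileDiff full    = b.build();        // checks nested snapshotCopy too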
17204
17205        public Builder mergeFrom(com.google.protobuf.Message other) {
17206          if (other instanceof org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.FileDiff) {
17207            return mergeFrom((org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.FileDiff)other);
17208          } else {
17209            super.mergeFrom(other);
17210            return this;
17211          }
17212        }
17213
17214        public Builder mergeFrom(org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.FileDiff other) {
17215          if (other == org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.FileDiff.getDefaultInstance()) return this;
17216          if (other.hasSnapshotId()) {
17217            setSnapshotId(other.getSnapshotId());
17218          }
17219          if (other.hasFileSize()) {
17220            setFileSize(other.getFileSize());
17221          }
17222          if (other.hasName()) {
17223            setName(other.getName());
17224          }
17225          if (other.hasSnapshotCopy()) {
17226            mergeSnapshotCopy(other.getSnapshotCopy());
17227          }
17228          this.mergeUnknownFields(other.getUnknownFields());
17229          return this;
17230        }
17231
17232        public final boolean isInitialized() {
17233          if (hasSnapshotCopy()) {
17234            if (!getSnapshotCopy().isInitialized()) {
17236              return false;
17237            }
17238          }
17239          return true;
17240        }
17241
17242        public Builder mergeFrom(
17243            com.google.protobuf.CodedInputStream input,
17244            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
17245            throws java.io.IOException {
17246          org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.FileDiff parsedMessage = null;
17247          try {
17248            parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
17249          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
17250            parsedMessage = (org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.FileDiff) e.getUnfinishedMessage();
17251            throw e;
17252          } finally {
17253            if (parsedMessage != null) {
17254              mergeFrom(parsedMessage);
17255            }
17256          }
17257          return this;
17258        }
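        // Note on the mergeFrom(CodedInputStream, ...) overload above: when
        // parsing fails mid-message, the bytes read so far are still merged
        // into this builder (the finally block merges e.getUnfinishedMessage()
        // before the InvalidProtocolBufferException is rethrown), so a caller
        // that catches the exception may observe partially populated fields.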
17259        private int bitField0_;
17260
17261        // optional uint32 snapshotId = 1;
17262        private int snapshotId_ ;
17263        /**
17264         * <code>optional uint32 snapshotId = 1;</code>
17265         */
17266        public boolean hasSnapshotId() {
17267          return ((bitField0_ & 0x00000001) == 0x00000001);
17268        }
17269        /**
17270         * <code>optional uint32 snapshotId = 1;</code>
17271         */
17272        public int getSnapshotId() {
17273          return snapshotId_;
17274        }
17275        /**
17276         * <code>optional uint32 snapshotId = 1;</code>
17277         */
17278        public Builder setSnapshotId(int value) {
17279          bitField0_ |= 0x00000001;
17280          snapshotId_ = value;
17281          onChanged();
17282          return this;
17283        }
17284        /**
17285         * <code>optional uint32 snapshotId = 1;</code>
17286         */
17287        public Builder clearSnapshotId() {
17288          bitField0_ = (bitField0_ & ~0x00000001);
17289          snapshotId_ = 0;
17290          onChanged();
17291          return this;
17292        }
17293
17294        // optional uint64 fileSize = 2;
17295        private long fileSize_ ;
17296        /**
17297         * <code>optional uint64 fileSize = 2;</code>
17298         */
17299        public boolean hasFileSize() {
17300          return ((bitField0_ & 0x00000002) == 0x00000002);
17301        }
17302        /**
17303         * <code>optional uint64 fileSize = 2;</code>
17304         */
17305        public long getFileSize() {
17306          return fileSize_;
17307        }
17308        /**
17309         * <code>optional uint64 fileSize = 2;</code>
17310         */
17311        public Builder setFileSize(long value) {
17312          bitField0_ |= 0x00000002;
17313          fileSize_ = value;
17314          onChanged();
17315          return this;
17316        }
17317        /**
17318         * <code>optional uint64 fileSize = 2;</code>
17319         */
17320        public Builder clearFileSize() {
17321          bitField0_ = (bitField0_ & ~0x00000002);
17322          fileSize_ = 0L;
17323          onChanged();
17324          return this;
17325        }
17326
17327        // optional bytes name = 3;
17328        private com.google.protobuf.ByteString name_ = com.google.protobuf.ByteString.EMPTY;
17329        /**
17330         * <code>optional bytes name = 3;</code>
17331         */
17332        public boolean hasName() {
17333          return ((bitField0_ & 0x00000004) == 0x00000004);
17334        }
17335        /**
17336         * <code>optional bytes name = 3;</code>
17337         */
17338        public com.google.protobuf.ByteString getName() {
17339          return name_;
17340        }
17341        /**
17342         * <code>optional bytes name = 3;</code>
17343         */
17344        public Builder setName(com.google.protobuf.ByteString value) {
17345          if (value == null) {
17346            throw new NullPointerException();
17347          }
17348          bitField0_ |= 0x00000004;
17349          name_ = value;
17350          onChanged();
17351          return this;
17352        }
17353        /**
17354         * <code>optional bytes name = 3;</code>
17355         */
17356        public Builder clearName() {
17357          bitField0_ = (bitField0_ & ~0x00000004);
17358          name_ = getDefaultInstance().getName();
17359          onChanged();
17360          return this;
17361        }
17362
17363        // optional .hadoop.hdfs.fsimage.INodeSection.INodeFile snapshotCopy = 4;
17364        private org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile snapshotCopy_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile.getDefaultInstance();
17365        private com.google.protobuf.SingleFieldBuilder<
17366            org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFileOrBuilder> snapshotCopyBuilder_;
17367        /**
17368         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeFile snapshotCopy = 4;</code>
17369         */
17370        public boolean hasSnapshotCopy() {
17371          return ((bitField0_ & 0x00000008) == 0x00000008);
17372        }
17373        /**
17374         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeFile snapshotCopy = 4;</code>
17375         */
17376        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile getSnapshotCopy() {
17377          if (snapshotCopyBuilder_ == null) {
17378            return snapshotCopy_;
17379          } else {
17380            return snapshotCopyBuilder_.getMessage();
17381          }
17382        }
17383        /**
17384         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeFile snapshotCopy = 4;</code>
17385         */
17386        public Builder setSnapshotCopy(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile value) {
17387          if (snapshotCopyBuilder_ == null) {
17388            if (value == null) {
17389              throw new NullPointerException();
17390            }
17391            snapshotCopy_ = value;
17392            onChanged();
17393          } else {
17394            snapshotCopyBuilder_.setMessage(value);
17395          }
17396          bitField0_ |= 0x00000008;
17397          return this;
17398        }
17399        /**
17400         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeFile snapshotCopy = 4;</code>
17401         */
17402        public Builder setSnapshotCopy(
17403            org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile.Builder builderForValue) {
17404          if (snapshotCopyBuilder_ == null) {
17405            snapshotCopy_ = builderForValue.build();
17406            onChanged();
17407          } else {
17408            snapshotCopyBuilder_.setMessage(builderForValue.build());
17409          }
17410          bitField0_ |= 0x00000008;
17411          return this;
17412        }
17413        /**
17414         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeFile snapshotCopy = 4;</code>
17415         */
17416        public Builder mergeSnapshotCopy(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile value) {
17417          if (snapshotCopyBuilder_ == null) {
17418            if (((bitField0_ & 0x00000008) == 0x00000008) &&
17419                snapshotCopy_ != org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile.getDefaultInstance()) {
17420              snapshotCopy_ =
17421                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile.newBuilder(snapshotCopy_).mergeFrom(value).buildPartial();
17422            } else {
17423              snapshotCopy_ = value;
17424            }
17425            onChanged();
17426          } else {
17427            snapshotCopyBuilder_.mergeFrom(value);
17428          }
17429          bitField0_ |= 0x00000008;
17430          return this;
17431        }
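        // Note on mergeSnapshotCopy(...) above: if snapshotCopy is already set
        // to a non-default value, the incoming message is merged into it field
        // by field (via INodeFile.newBuilder(snapshotCopy_).mergeFrom(value));
        // otherwise the incoming message simply replaces the current value.
        // This mirrors standard protobuf sub-message merge semantics.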
17432        /**
17433         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeFile snapshotCopy = 4;</code>
17434         */
17435        public Builder clearSnapshotCopy() {
17436          if (snapshotCopyBuilder_ == null) {
17437            snapshotCopy_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile.getDefaultInstance();
17438            onChanged();
17439          } else {
17440            snapshotCopyBuilder_.clear();
17441          }
17442          bitField0_ = (bitField0_ & ~0x00000008);
17443          return this;
17444        }
17445        /**
17446         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeFile snapshotCopy = 4;</code>
17447         */
17448        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile.Builder getSnapshotCopyBuilder() {
17449          bitField0_ |= 0x00000008;
17450          onChanged();
17451          return getSnapshotCopyFieldBuilder().getBuilder();
17452        }
17453        /**
17454         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeFile snapshotCopy = 4;</code>
17455         */
17456        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFileOrBuilder getSnapshotCopyOrBuilder() {
17457          if (snapshotCopyBuilder_ != null) {
17458            return snapshotCopyBuilder_.getMessageOrBuilder();
17459          } else {
17460            return snapshotCopy_;
17461          }
17462        }
17463        /**
17464         * <code>optional .hadoop.hdfs.fsimage.INodeSection.INodeFile snapshotCopy = 4;</code>
17465         */
17466        private com.google.protobuf.SingleFieldBuilder<
17467            org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFileOrBuilder> 
17468            getSnapshotCopyFieldBuilder() {
17469          if (snapshotCopyBuilder_ == null) {
17470            snapshotCopyBuilder_ = new com.google.protobuf.SingleFieldBuilder<
17471                org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile.Builder, org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFileOrBuilder>(
17472                    snapshotCopy_,
17473                    getParentForChildren(),
17474                    isClean());
17475            snapshotCopy_ = null;
17476          }
17477          return snapshotCopyBuilder_;
17478        }
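        // Note on getSnapshotCopyFieldBuilder() above: the SingleFieldBuilder
        // is created lazily on first use, seeded with the current snapshotCopy_
        // value; snapshotCopy_ is then nulled out because the builder becomes
        // the single source of truth for the field (hence the
        // snapshotCopyBuilder_ null checks in the accessors above).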
17479
17480        // @@protoc_insertion_point(builder_scope:hadoop.hdfs.fsimage.SnapshotDiffSection.FileDiff)
17481      }
17482
17483      static {
17484        defaultInstance = new FileDiff(true);
17485        defaultInstance.initFields();
17486      }
17487
17488      // @@protoc_insertion_point(class_scope:hadoop.hdfs.fsimage.SnapshotDiffSection.FileDiff)
17489    }
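    // Illustrative usage of the generated FileDiff API above (a minimal
    // sketch with hypothetical values; not part of the generated code):
    //
    //   FsImageProto.SnapshotDiffSection.FileDiff diff =
    //       FsImageProto.SnapshotDiffSection.FileDiff.newBuilder()
    //           .setSnapshotId(3)
    //           .setFileSize(1024L)
    //           .setName(com.google.protobuf.ByteString.copyFromUtf8("f"))
    //           .build();
    //   assert diff.hasFileSize() && diff.getFileSize() == 1024L;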
17490
17491    public interface DiffEntryOrBuilder
17492        extends com.google.protobuf.MessageOrBuilder {
17493
17494      // required .hadoop.hdfs.fsimage.SnapshotDiffSection.DiffEntry.Type type = 1;
17495      /**
17496       * <code>required .hadoop.hdfs.fsimage.SnapshotDiffSection.DiffEntry.Type type = 1;</code>
17497       */
17498      boolean hasType();
17499      /**
17500       * <code>required .hadoop.hdfs.fsimage.SnapshotDiffSection.DiffEntry.Type type = 1;</code>
17501       */
17502      org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry.Type getType();
17503
17504      // optional uint64 inodeId = 2;
17505      /**
17506       * <code>optional uint64 inodeId = 2;</code>
17507       */
17508      boolean hasInodeId();
17509      /**
17510       * <code>optional uint64 inodeId = 2;</code>
17511       */
17512      long getInodeId();
17513
17514      // optional uint32 numOfDiff = 3;
17515      /**
17516       * <code>optional uint32 numOfDiff = 3;</code>
17517       */
17518      boolean hasNumOfDiff();
17519      /**
17520       * <code>optional uint32 numOfDiff = 3;</code>
17521       */
17522      int getNumOfDiff();
17523    }
17524    /**
17525     * Protobuf type {@code hadoop.hdfs.fsimage.SnapshotDiffSection.DiffEntry}
17526     */
17527    public static final class DiffEntry extends
17528        com.google.protobuf.GeneratedMessage
17529        implements DiffEntryOrBuilder {
17530      // Use DiffEntry.newBuilder() to construct.
17531      private DiffEntry(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
17532        super(builder);
17533        this.unknownFields = builder.getUnknownFields();
17534      }
17535      private DiffEntry(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
17536
17537      private static final DiffEntry defaultInstance;
17538      public static DiffEntry getDefaultInstance() {
17539        return defaultInstance;
17540      }
17541
17542      public DiffEntry getDefaultInstanceForType() {
17543        return defaultInstance;
17544      }
17545
17546      private final com.google.protobuf.UnknownFieldSet unknownFields;
17547      @java.lang.Override
17548      public final com.google.protobuf.UnknownFieldSet
17549          getUnknownFields() {
17550        return this.unknownFields;
17551      }
17552      private DiffEntry(
17553          com.google.protobuf.CodedInputStream input,
17554          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
17555          throws com.google.protobuf.InvalidProtocolBufferException {
17556        initFields();
17557        int mutable_bitField0_ = 0;
17558        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
17559            com.google.protobuf.UnknownFieldSet.newBuilder();
17560        try {
17561          boolean done = false;
17562          while (!done) {
17563            int tag = input.readTag();
17564            switch (tag) {
17565              case 0:
17566                done = true;
17567                break;
17568              default: {
17569                if (!parseUnknownField(input, unknownFields,
17570                                       extensionRegistry, tag)) {
17571                  done = true;
17572                }
17573                break;
17574              }
17575              case 8: {
17576                int rawValue = input.readEnum();
17577                org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry.Type value = org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry.Type.valueOf(rawValue);
17578                if (value == null) {
17579                  unknownFields.mergeVarintField(1, rawValue);
17580                } else {
17581                  bitField0_ |= 0x00000001;
17582                  type_ = value;
17583                }
17584                break;
17585              }
17586              case 16: {
17587                bitField0_ |= 0x00000002;
17588                inodeId_ = input.readUInt64();
17589                break;
17590              }
17591              case 24: {
17592                bitField0_ |= 0x00000004;
17593                numOfDiff_ = input.readUInt32();
17594                break;
17595              }
17596            }
17597          }
17598        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
17599          throw e.setUnfinishedMessage(this);
17600        } catch (java.io.IOException e) {
17601          throw new com.google.protobuf.InvalidProtocolBufferException(
17602              e.getMessage()).setUnfinishedMessage(this);
17603        } finally {
17604          this.unknownFields = unknownFields.build();
17605          makeExtensionsImmutable();
17606        }
17607      }
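      // Note on the parsing constructor above: each case label is a full wire
      // tag, (fieldNumber << 3) | wireType. Tags 8, 16 and 24 are fields 1-3
      // as varints (wire type 0); tag 0 marks end of input, and unrecognized
      // tags are preserved via parseUnknownField for forward compatibility.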
17608      public static final com.google.protobuf.Descriptors.Descriptor
17609          getDescriptor() {
17610        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_DiffEntry_descriptor;
17611      }
17612
17613      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
17614          internalGetFieldAccessorTable() {
17615        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_DiffEntry_fieldAccessorTable
17616            .ensureFieldAccessorsInitialized(
17617                org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry.Builder.class);
17618      }
17619
17620      public static com.google.protobuf.Parser<DiffEntry> PARSER =
17621          new com.google.protobuf.AbstractParser<DiffEntry>() {
17622        public DiffEntry parsePartialFrom(
17623            com.google.protobuf.CodedInputStream input,
17624            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
17625            throws com.google.protobuf.InvalidProtocolBufferException {
17626          return new DiffEntry(input, extensionRegistry);
17627        }
17628      };
17629
17630      @java.lang.Override
17631      public com.google.protobuf.Parser<DiffEntry> getParserForType() {
17632        return PARSER;
17633      }
17634
17635      /**
17636       * Protobuf enum {@code hadoop.hdfs.fsimage.SnapshotDiffSection.DiffEntry.Type}
17637       */
17638      public enum Type
17639          implements com.google.protobuf.ProtocolMessageEnum {
17640        /**
17641         * <code>FILEDIFF = 1;</code>
17642         */
17643        FILEDIFF(0, 1),
17644        /**
17645         * <code>DIRECTORYDIFF = 2;</code>
17646         */
17647        DIRECTORYDIFF(1, 2),
17648        ;
17649
17650        /**
17651         * <code>FILEDIFF = 1;</code>
17652         */
17653        public static final int FILEDIFF_VALUE = 1;
17654        /**
17655         * <code>DIRECTORYDIFF = 2;</code>
17656         */
17657        public static final int DIRECTORYDIFF_VALUE = 2;
17658
17659
17660        public final int getNumber() { return value; }
17661
17662        public static Type valueOf(int value) {
17663          switch (value) {
17664            case 1: return FILEDIFF;
17665            case 2: return DIRECTORYDIFF;
17666            default: return null;
17667          }
17668        }
17669
17670        public static com.google.protobuf.Internal.EnumLiteMap<Type>
17671            internalGetValueMap() {
17672          return internalValueMap;
17673        }
17674        private static com.google.protobuf.Internal.EnumLiteMap<Type>
17675            internalValueMap =
17676              new com.google.protobuf.Internal.EnumLiteMap<Type>() {
17677                public Type findValueByNumber(int number) {
17678                  return Type.valueOf(number);
17679                }
17680              };
17681
17682        public final com.google.protobuf.Descriptors.EnumValueDescriptor
17683            getValueDescriptor() {
17684          return getDescriptor().getValues().get(index);
17685        }
17686        public final com.google.protobuf.Descriptors.EnumDescriptor
17687            getDescriptorForType() {
17688          return getDescriptor();
17689        }
17690        public static final com.google.protobuf.Descriptors.EnumDescriptor
17691            getDescriptor() {
17692          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry.getDescriptor().getEnumTypes().get(0);
17693        }
17694
17695        private static final Type[] VALUES = values();
17696
17697        public static Type valueOf(
17698            com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
17699          if (desc.getType() != getDescriptor()) {
17700            throw new java.lang.IllegalArgumentException(
17701              "EnumValueDescriptor is not for this type.");
17702          }
17703          return VALUES[desc.getIndex()];
17704        }
17705
17706        private final int index;
17707        private final int value;
17708
17709        private Type(int index, int value) {
17710          this.index = index;
17711          this.value = value;
17712        }
17713
17714        // @@protoc_insertion_point(enum_scope:hadoop.hdfs.fsimage.SnapshotDiffSection.DiffEntry.Type)
17715      }
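      // Note on Type above: valueOf(int) returns null for numbers this build
      // of the enum does not know, which is why the parsing constructor routes
      // unknown enum values into unknownFields instead of failing.
      // Illustrative sketch:
      //
      //   Type t = Type.valueOf(2);   // DIRECTORYDIFF
      //   Type u = Type.valueOf(99);  // null: unknown on this schema version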
17716
17717      private int bitField0_;
17718      // required .hadoop.hdfs.fsimage.SnapshotDiffSection.DiffEntry.Type type = 1;
17719      public static final int TYPE_FIELD_NUMBER = 1;
17720      private org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry.Type type_;
17721      /**
17722       * <code>required .hadoop.hdfs.fsimage.SnapshotDiffSection.DiffEntry.Type type = 1;</code>
17723       */
17724      public boolean hasType() {
17725        return ((bitField0_ & 0x00000001) == 0x00000001);
17726      }
17727      /**
17728       * <code>required .hadoop.hdfs.fsimage.SnapshotDiffSection.DiffEntry.Type type = 1;</code>
17729       */
17730      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry.Type getType() {
17731        return type_;
17732      }
17733
17734      // optional uint64 inodeId = 2;
17735      public static final int INODEID_FIELD_NUMBER = 2;
17736      private long inodeId_;
17737      /**
17738       * <code>optional uint64 inodeId = 2;</code>
17739       */
17740      public boolean hasInodeId() {
17741        return ((bitField0_ & 0x00000002) == 0x00000002);
17742      }
17743      /**
17744       * <code>optional uint64 inodeId = 2;</code>
17745       */
17746      public long getInodeId() {
17747        return inodeId_;
17748      }
17749
17750      // optional uint32 numOfDiff = 3;
17751      public static final int NUMOFDIFF_FIELD_NUMBER = 3;
17752      private int numOfDiff_;
17753      /**
17754       * <code>optional uint32 numOfDiff = 3;</code>
17755       */
17756      public boolean hasNumOfDiff() {
17757        return ((bitField0_ & 0x00000004) == 0x00000004);
17758      }
17759      /**
17760       * <code>optional uint32 numOfDiff = 3;</code>
17761       */
17762      public int getNumOfDiff() {
17763        return numOfDiff_;
17764      }
17765
17766      private void initFields() {
17767        type_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry.Type.FILEDIFF;
17768        inodeId_ = 0L;
17769        numOfDiff_ = 0;
17770      }
17771      private byte memoizedIsInitialized = -1;
17772      public final boolean isInitialized() {
17773        byte isInitialized = memoizedIsInitialized;
17774        if (isInitialized != -1) return isInitialized == 1;
17775
17776        if (!hasType()) {
17777          memoizedIsInitialized = 0;
17778          return false;
17779        }
17780        memoizedIsInitialized = 1;
17781        return true;
17782      }
17783
17784      public void writeTo(com.google.protobuf.CodedOutputStream output)
17785                          throws java.io.IOException {
17786        getSerializedSize();
17787        if (((bitField0_ & 0x00000001) == 0x00000001)) {
17788          output.writeEnum(1, type_.getNumber());
17789        }
17790        if (((bitField0_ & 0x00000002) == 0x00000002)) {
17791          output.writeUInt64(2, inodeId_);
17792        }
17793        if (((bitField0_ & 0x00000004) == 0x00000004)) {
17794          output.writeUInt32(3, numOfDiff_);
17795        }
17796        getUnknownFields().writeTo(output);
17797      }
17798
17799      private int memoizedSerializedSize = -1;
17800      public int getSerializedSize() {
17801        int size = memoizedSerializedSize;
17802        if (size != -1) return size;
17803
17804        size = 0;
17805        if (((bitField0_ & 0x00000001) == 0x00000001)) {
17806          size += com.google.protobuf.CodedOutputStream
17807            .computeEnumSize(1, type_.getNumber());
17808        }
17809        if (((bitField0_ & 0x00000002) == 0x00000002)) {
17810          size += com.google.protobuf.CodedOutputStream
17811            .computeUInt64Size(2, inodeId_);
17812        }
17813        if (((bitField0_ & 0x00000004) == 0x00000004)) {
17814          size += com.google.protobuf.CodedOutputStream
17815            .computeUInt32Size(3, numOfDiff_);
17816        }
17817        size += getUnknownFields().getSerializedSize();
17818        memoizedSerializedSize = size;
17819        return size;
17820      }
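      // Note on getSerializedSize() above: the computed size is memoized in
      // memoizedSerializedSize (-1 means "not yet computed"); writeTo() calls
      // it first, following the generated-code convention that memoized sizes
      // are populated before serialization begins.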
17821
17822      private static final long serialVersionUID = 0L;
17823      @java.lang.Override
17824      protected java.lang.Object writeReplace()
17825          throws java.io.ObjectStreamException {
17826        return super.writeReplace();
17827      }
17828
17829      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry parseFrom(
17830          com.google.protobuf.ByteString data)
17831          throws com.google.protobuf.InvalidProtocolBufferException {
17832        return PARSER.parseFrom(data);
17833      }
17834      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry parseFrom(
17835          com.google.protobuf.ByteString data,
17836          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
17837          throws com.google.protobuf.InvalidProtocolBufferException {
17838        return PARSER.parseFrom(data, extensionRegistry);
17839      }
17840      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry parseFrom(byte[] data)
17841          throws com.google.protobuf.InvalidProtocolBufferException {
17842        return PARSER.parseFrom(data);
17843      }
17844      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry parseFrom(
17845          byte[] data,
17846          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
17847          throws com.google.protobuf.InvalidProtocolBufferException {
17848        return PARSER.parseFrom(data, extensionRegistry);
17849      }
17850      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry parseFrom(java.io.InputStream input)
17851          throws java.io.IOException {
17852        return PARSER.parseFrom(input);
17853      }
17854      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry parseFrom(
17855          java.io.InputStream input,
17856          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
17857          throws java.io.IOException {
17858        return PARSER.parseFrom(input, extensionRegistry);
17859      }
17860      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry parseDelimitedFrom(java.io.InputStream input)
17861          throws java.io.IOException {
17862        return PARSER.parseDelimitedFrom(input);
17863      }
17864      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry parseDelimitedFrom(
17865          java.io.InputStream input,
17866          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
17867          throws java.io.IOException {
17868        return PARSER.parseDelimitedFrom(input, extensionRegistry);
17869      }
17870      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry parseFrom(
17871          com.google.protobuf.CodedInputStream input)
17872          throws java.io.IOException {
17873        return PARSER.parseFrom(input);
17874      }
17875      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry parseFrom(
17876          com.google.protobuf.CodedInputStream input,
17877          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
17878          throws java.io.IOException {
17879        return PARSER.parseFrom(input, extensionRegistry);
17880      }
17881
17882      public static Builder newBuilder() { return Builder.create(); }
17883      public Builder newBuilderForType() { return newBuilder(); }
17884      public static Builder newBuilder(org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry prototype) {
17885        return newBuilder().mergeFrom(prototype);
17886      }
17887      public Builder toBuilder() { return newBuilder(this); }
17888
17889      @java.lang.Override
17890      protected Builder newBuilderForType(
17891          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
17892        Builder builder = new Builder(parent);
17893        return builder;
17894      }
17895      /**
17896       * Protobuf type {@code hadoop.hdfs.fsimage.SnapshotDiffSection.DiffEntry}
17897       */
17898      public static final class Builder extends
17899          com.google.protobuf.GeneratedMessage.Builder<Builder>
17900         implements org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntryOrBuilder {
17901        public static final com.google.protobuf.Descriptors.Descriptor
17902            getDescriptor() {
17903          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_DiffEntry_descriptor;
17904        }
17905
17906        protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
17907            internalGetFieldAccessorTable() {
17908          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_DiffEntry_fieldAccessorTable
17909              .ensureFieldAccessorsInitialized(
17910                  org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry.Builder.class);
17911        }
17912
17913        // Construct using org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry.newBuilder()
17914        private Builder() {
17915          maybeForceBuilderInitialization();
17916        }
17917
17918        private Builder(
17919            com.google.protobuf.GeneratedMessage.BuilderParent parent) {
17920          super(parent);
17921          maybeForceBuilderInitialization();
17922        }
17923        private void maybeForceBuilderInitialization() {
17924          if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
17925          }
17926        }
17927        private static Builder create() {
17928          return new Builder();
17929        }
17930
17931        public Builder clear() {
17932          super.clear();
17933          type_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry.Type.FILEDIFF;
17934          bitField0_ = (bitField0_ & ~0x00000001);
17935          inodeId_ = 0L;
17936          bitField0_ = (bitField0_ & ~0x00000002);
17937          numOfDiff_ = 0;
17938          bitField0_ = (bitField0_ & ~0x00000004);
17939          return this;
17940        }
17941
17942        public Builder clone() {
17943          return create().mergeFrom(buildPartial());
17944        }
17945
17946        public com.google.protobuf.Descriptors.Descriptor
17947            getDescriptorForType() {
17948          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_DiffEntry_descriptor;
17949        }
17950
17951        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry getDefaultInstanceForType() {
17952          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry.getDefaultInstance();
17953        }
17954
17955        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry build() {
17956          org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry result = buildPartial();
17957          if (!result.isInitialized()) {
17958            throw newUninitializedMessageException(result);
17959          }
17960          return result;
17961        }
17962
17963        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry buildPartial() {
17964          org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry result = new org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry(this);
17965          int from_bitField0_ = bitField0_;
17966          int to_bitField0_ = 0;
17967          if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
17968            to_bitField0_ |= 0x00000001;
17969          }
17970          result.type_ = type_;
17971          if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
17972            to_bitField0_ |= 0x00000002;
17973          }
17974          result.inodeId_ = inodeId_;
17975          if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
17976            to_bitField0_ |= 0x00000004;
17977          }
17978          result.numOfDiff_ = numOfDiff_;
17979          result.bitField0_ = to_bitField0_;
17980          onBuilt();
17981          return result;
17982        }
17983
17984        public Builder mergeFrom(com.google.protobuf.Message other) {
17985          if (other instanceof org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry) {
17986            return mergeFrom((org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry)other);
17987          } else {
17988            super.mergeFrom(other);
17989            return this;
17990          }
17991        }
17992
17993        public Builder mergeFrom(org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry other) {
17994          if (other == org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry.getDefaultInstance()) return this;
17995          if (other.hasType()) {
17996            setType(other.getType());
17997          }
17998          if (other.hasInodeId()) {
17999            setInodeId(other.getInodeId());
18000          }
18001          if (other.hasNumOfDiff()) {
18002            setNumOfDiff(other.getNumOfDiff());
18003          }
18004          this.mergeUnknownFields(other.getUnknownFields());
18005          return this;
18006        }
18007
18008        public final boolean isInitialized() {
18009          if (!hasType()) {
18011            return false;
18012          }
18013          return true;
18014        }
18015
18016        public Builder mergeFrom(
18017            com.google.protobuf.CodedInputStream input,
18018            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
18019            throws java.io.IOException {
18020          org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry parsedMessage = null;
18021          try {
18022            parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
18023          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
18024            parsedMessage = (org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry) e.getUnfinishedMessage();
18025            throw e;
18026          } finally {
18027            if (parsedMessage != null) {
18028              mergeFrom(parsedMessage);
18029            }
18030          }
18031          return this;
18032        }
18033        private int bitField0_;
18034
18035        // required .hadoop.hdfs.fsimage.SnapshotDiffSection.DiffEntry.Type type = 1;
18036        private org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry.Type type_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry.Type.FILEDIFF;
18037        /**
18038         * <code>required .hadoop.hdfs.fsimage.SnapshotDiffSection.DiffEntry.Type type = 1;</code>
18039         */
18040        public boolean hasType() {
18041          return ((bitField0_ & 0x00000001) == 0x00000001);
18042        }
18043        /**
18044         * <code>required .hadoop.hdfs.fsimage.SnapshotDiffSection.DiffEntry.Type type = 1;</code>
18045         */
18046        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry.Type getType() {
18047          return type_;
18048        }
18049        /**
18050         * <code>required .hadoop.hdfs.fsimage.SnapshotDiffSection.DiffEntry.Type type = 1;</code>
18051         */
18052        public Builder setType(org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry.Type value) {
18053          if (value == null) {
18054            throw new NullPointerException();
18055          }
18056          bitField0_ |= 0x00000001;
18057          type_ = value;
18058          onChanged();
18059          return this;
18060        }
18061        /**
18062         * <code>required .hadoop.hdfs.fsimage.SnapshotDiffSection.DiffEntry.Type type = 1;</code>
18063         */
18064        public Builder clearType() {
18065          bitField0_ = (bitField0_ & ~0x00000001);
18066          type_ = org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry.Type.FILEDIFF;
18067          onChanged();
18068          return this;
18069        }
18070
18071        // optional uint64 inodeId = 2;
18072        private long inodeId_ ;
18073        /**
18074         * <code>optional uint64 inodeId = 2;</code>
18075         */
18076        public boolean hasInodeId() {
18077          return ((bitField0_ & 0x00000002) == 0x00000002);
18078        }
18079        /**
18080         * <code>optional uint64 inodeId = 2;</code>
18081         */
18082        public long getInodeId() {
18083          return inodeId_;
18084        }
18085        /**
18086         * <code>optional uint64 inodeId = 2;</code>
18087         */
18088        public Builder setInodeId(long value) {
18089          bitField0_ |= 0x00000002;
18090          inodeId_ = value;
18091          onChanged();
18092          return this;
18093        }
18094        /**
18095         * <code>optional uint64 inodeId = 2;</code>
18096         */
18097        public Builder clearInodeId() {
18098          bitField0_ = (bitField0_ & ~0x00000002);
18099          inodeId_ = 0L;
18100          onChanged();
18101          return this;
18102        }
18103
18104        // optional uint32 numOfDiff = 3;
18105        private int numOfDiff_ ;
18106        /**
18107         * <code>optional uint32 numOfDiff = 3;</code>
18108         */
18109        public boolean hasNumOfDiff() {
18110          return ((bitField0_ & 0x00000004) == 0x00000004);
18111        }
18112        /**
18113         * <code>optional uint32 numOfDiff = 3;</code>
18114         */
18115        public int getNumOfDiff() {
18116          return numOfDiff_;
18117        }
18118        /**
18119         * <code>optional uint32 numOfDiff = 3;</code>
18120         */
18121        public Builder setNumOfDiff(int value) {
18122          bitField0_ |= 0x00000004;
18123          numOfDiff_ = value;
18124          onChanged();
18125          return this;
18126        }
18127        /**
18128         * <code>optional uint32 numOfDiff = 3;</code>
18129         */
18130        public Builder clearNumOfDiff() {
18131          bitField0_ = (bitField0_ & ~0x00000004);
18132          numOfDiff_ = 0;
18133          onChanged();
18134          return this;
18135        }
18136
18137        // @@protoc_insertion_point(builder_scope:hadoop.hdfs.fsimage.SnapshotDiffSection.DiffEntry)
18138      }
18139
18140      static {
18141        defaultInstance = new DiffEntry(true);
18142        defaultInstance.initFields();
18143      }
18144
18145      // @@protoc_insertion_point(class_scope:hadoop.hdfs.fsimage.SnapshotDiffSection.DiffEntry)
18146    }
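    // Illustrative round trip for DiffEntry above (a minimal sketch;
    // values are hypothetical):
    //
    //   FsImageProto.SnapshotDiffSection.DiffEntry e =
    //       FsImageProto.SnapshotDiffSection.DiffEntry.newBuilder()
    //           .setType(DiffEntry.Type.FILEDIFF) // the only required field
    //           .setInodeId(16385L)
    //           .setNumOfDiff(1)
    //           .build();
    //   FsImageProto.SnapshotDiffSection.DiffEntry parsed =
    //       FsImageProto.SnapshotDiffSection.DiffEntry.parseFrom(e.toByteString());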
18147
18148    private void initFields() {
18149    }
18150    private byte memoizedIsInitialized = -1;
18151    public final boolean isInitialized() {
18152      byte isInitialized = memoizedIsInitialized;
18153      if (isInitialized != -1) return isInitialized == 1;
18154
18155      memoizedIsInitialized = 1;
18156      return true;
18157    }
18158
18159    public void writeTo(com.google.protobuf.CodedOutputStream output)
18160                        throws java.io.IOException {
18161      getSerializedSize();
18162      getUnknownFields().writeTo(output);
18163    }
18164
18165    private int memoizedSerializedSize = -1;
18166    public int getSerializedSize() {
18167      int size = memoizedSerializedSize;
18168      if (size != -1) return size;
18169
18170      size = 0;
18171      size += getUnknownFields().getSerializedSize();
18172      memoizedSerializedSize = size;
18173      return size;
18174    }
18175
18176    private static final long serialVersionUID = 0L;
18177    @java.lang.Override
18178    protected java.lang.Object writeReplace()
18179        throws java.io.ObjectStreamException {
18180      return super.writeReplace();
18181    }
18182
18183    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection parseFrom(
18184        com.google.protobuf.ByteString data)
18185        throws com.google.protobuf.InvalidProtocolBufferException {
18186      return PARSER.parseFrom(data);
18187    }
18188    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection parseFrom(
18189        com.google.protobuf.ByteString data,
18190        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
18191        throws com.google.protobuf.InvalidProtocolBufferException {
18192      return PARSER.parseFrom(data, extensionRegistry);
18193    }
18194    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection parseFrom(byte[] data)
18195        throws com.google.protobuf.InvalidProtocolBufferException {
18196      return PARSER.parseFrom(data);
18197    }
18198    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection parseFrom(
18199        byte[] data,
18200        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
18201        throws com.google.protobuf.InvalidProtocolBufferException {
18202      return PARSER.parseFrom(data, extensionRegistry);
18203    }
18204    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection parseFrom(java.io.InputStream input)
18205        throws java.io.IOException {
18206      return PARSER.parseFrom(input);
18207    }
18208    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection parseFrom(
18209        java.io.InputStream input,
18210        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
18211        throws java.io.IOException {
18212      return PARSER.parseFrom(input, extensionRegistry);
18213    }
18214    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection parseDelimitedFrom(java.io.InputStream input)
18215        throws java.io.IOException {
18216      return PARSER.parseDelimitedFrom(input);
18217    }
18218    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection parseDelimitedFrom(
18219        java.io.InputStream input,
18220        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
18221        throws java.io.IOException {
18222      return PARSER.parseDelimitedFrom(input, extensionRegistry);
18223    }
18224    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection parseFrom(
18225        com.google.protobuf.CodedInputStream input)
18226        throws java.io.IOException {
18227      return PARSER.parseFrom(input);
18228    }
18229    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection parseFrom(
18230        com.google.protobuf.CodedInputStream input,
18231        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
18232        throws java.io.IOException {
18233      return PARSER.parseFrom(input, extensionRegistry);
18234    }
18235
18236    public static Builder newBuilder() { return Builder.create(); }
18237    public Builder newBuilderForType() { return newBuilder(); }
18238    public static Builder newBuilder(org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection prototype) {
18239      return newBuilder().mergeFrom(prototype);
18240    }
18241    public Builder toBuilder() { return newBuilder(this); }
18242
18243    @java.lang.Override
18244    protected Builder newBuilderForType(
18245        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
18246      Builder builder = new Builder(parent);
18247      return builder;
18248    }
18249    /**
18250     * Protobuf type {@code hadoop.hdfs.fsimage.SnapshotDiffSection}
18251     *
18252     * <pre>
18253     **
18254     * This section records information about snapshot diffs
18255     * NAME: SNAPSHOT_DIFF
18256     * </pre>
18257     */
18258    public static final class Builder extends
18259        com.google.protobuf.GeneratedMessage.Builder<Builder>
18260       implements org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSectionOrBuilder {
18261      public static final com.google.protobuf.Descriptors.Descriptor
18262          getDescriptor() {
18263        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_descriptor;
18264      }
18265
18266      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
18267          internalGetFieldAccessorTable() {
18268        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_fieldAccessorTable
18269            .ensureFieldAccessorsInitialized(
18270                org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.Builder.class);
18271      }
18272
18273      // Construct using org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.newBuilder()
18274      private Builder() {
18275        maybeForceBuilderInitialization();
18276      }
18277
18278      private Builder(
18279          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
18280        super(parent);
18281        maybeForceBuilderInitialization();
18282      }
18283      private void maybeForceBuilderInitialization() {
18284        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
18285        }
18286      }
18287      private static Builder create() {
18288        return new Builder();
18289      }
18290
18291      public Builder clear() {
18292        super.clear();
18293        return this;
18294      }
18295
18296      public Builder clone() {
18297        return create().mergeFrom(buildPartial());
18298      }
18299
18300      public com.google.protobuf.Descriptors.Descriptor
18301          getDescriptorForType() {
18302        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_descriptor;
18303      }
18304
18305      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection getDefaultInstanceForType() {
18306        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.getDefaultInstance();
18307      }
18308
18309      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection build() {
18310        org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection result = buildPartial();
18311        if (!result.isInitialized()) {
18312          throw newUninitializedMessageException(result);
18313        }
18314        return result;
18315      }
18316
18317      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection buildPartial() {
18318        org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection result = new org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection(this);
18319        onBuilt();
18320        return result;
18321      }
18322
18323      public Builder mergeFrom(com.google.protobuf.Message other) {
18324        if (other instanceof org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection) {
18325          return mergeFrom((org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection)other);
18326        } else {
18327          super.mergeFrom(other);
18328          return this;
18329        }
18330      }
18331
18332      public Builder mergeFrom(org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection other) {
18333        if (other == org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.getDefaultInstance()) return this;
18334        this.mergeUnknownFields(other.getUnknownFields());
18335        return this;
18336      }
18337
18338      public final boolean isInitialized() {
18339        return true;
18340      }
18341
18342      public Builder mergeFrom(
18343          com.google.protobuf.CodedInputStream input,
18344          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
18345          throws java.io.IOException {
18346        org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection parsedMessage = null;
18347        try {
18348          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
18349        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
18350          parsedMessage = (org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection) e.getUnfinishedMessage();
18351          throw e;
18352        } finally {
18353          if (parsedMessage != null) {
18354            mergeFrom(parsedMessage);
18355          }
18356        }
18357        return this;
18358      }
18359
18360      // @@protoc_insertion_point(builder_scope:hadoop.hdfs.fsimage.SnapshotDiffSection)
18361    }
18362
18363    static {
18364      defaultInstance = new SnapshotDiffSection(true);
18365      defaultInstance.initFields();
18366    }
18367
18368    // @@protoc_insertion_point(class_scope:hadoop.hdfs.fsimage.SnapshotDiffSection)
18369  }
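  // Note on SnapshotDiffSection above: the section message itself declares no
  // fields in this generated code (initFields() is empty and its serialized
  // size covers only unknown fields); FileDiff and DiffEntry are nested types.
  // A plausible reading (an inference from the generated parseDelimitedFrom
  // overloads, not something this file states) is that the section body is
  // consumed by streaming those nested messages as length-delimited records.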
18370
18371  public interface StringTableSectionOrBuilder
18372      extends com.google.protobuf.MessageOrBuilder {
18373
18374    // optional uint32 numEntry = 1;
18375    /**
18376     * <code>optional uint32 numEntry = 1;</code>
18377     *
18378     * <pre>
18379     * repeated Entry
18380     * </pre>
18381     */
18382    boolean hasNumEntry();
18383    /**
18384     * <code>optional uint32 numEntry = 1;</code>
18385     *
18386     * <pre>
18387     * repeated Entry
18388     * </pre>
18389     */
18390    int getNumEntry();
18391  }
18392  /**
18393   * Protobuf type {@code hadoop.hdfs.fsimage.StringTableSection}
18394   *
18395   * <pre>
18396   **
18397   * This section maps strings to ids
18398   * NAME: STRING_TABLE
18399   * </pre>
18400   */
18401  public static final class StringTableSection extends
18402      com.google.protobuf.GeneratedMessage
18403      implements StringTableSectionOrBuilder {
18404    // Use StringTableSection.newBuilder() to construct.
18405    private StringTableSection(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
18406      super(builder);
18407      this.unknownFields = builder.getUnknownFields();
18408    }
18409    private StringTableSection(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
18410
18411    private static final StringTableSection defaultInstance;
18412    public static StringTableSection getDefaultInstance() {
18413      return defaultInstance;
18414    }
18415
18416    public StringTableSection getDefaultInstanceForType() {
18417      return defaultInstance;
18418    }
18419
18420    private final com.google.protobuf.UnknownFieldSet unknownFields;
18421    @java.lang.Override
18422    public final com.google.protobuf.UnknownFieldSet
18423        getUnknownFields() {
18424      return this.unknownFields;
18425    }
18426    private StringTableSection(
18427        com.google.protobuf.CodedInputStream input,
18428        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
18429        throws com.google.protobuf.InvalidProtocolBufferException {
18430      initFields();
18431      int mutable_bitField0_ = 0;
18432      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
18433          com.google.protobuf.UnknownFieldSet.newBuilder();
18434      try {
18435        boolean done = false;
18436        while (!done) {
18437          int tag = input.readTag();
18438          switch (tag) {
18439            case 0:
18440              done = true;
18441              break;
18442            default: {
18443              if (!parseUnknownField(input, unknownFields,
18444                                     extensionRegistry, tag)) {
18445                done = true;
18446              }
18447              break;
18448            }
18449            case 8: {
18450              bitField0_ |= 0x00000001;
18451              numEntry_ = input.readUInt32();
18452              break;
18453            }
18454          }
18455        }
18456      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
18457        throw e.setUnfinishedMessage(this);
18458      } catch (java.io.IOException e) {
18459        throw new com.google.protobuf.InvalidProtocolBufferException(
18460            e.getMessage()).setUnfinishedMessage(this);
18461      } finally {
18462        this.unknownFields = unknownFields.build();
18463        makeExtensionsImmutable();
18464      }
18465    }
18466    public static final com.google.protobuf.Descriptors.Descriptor
18467        getDescriptor() {
18468      return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_StringTableSection_descriptor;
18469    }
18470
18471    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
18472        internalGetFieldAccessorTable() {
18473      return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_StringTableSection_fieldAccessorTable
18474          .ensureFieldAccessorsInitialized(
18475              org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.Builder.class);
18476    }
18477
18478    public static com.google.protobuf.Parser<StringTableSection> PARSER =
18479        new com.google.protobuf.AbstractParser<StringTableSection>() {
18480      public StringTableSection parsePartialFrom(
18481          com.google.protobuf.CodedInputStream input,
18482          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
18483          throws com.google.protobuf.InvalidProtocolBufferException {
18484        return new StringTableSection(input, extensionRegistry);
18485      }
18486    };
18487
18488    @java.lang.Override
18489    public com.google.protobuf.Parser<StringTableSection> getParserForType() {
18490      return PARSER;
18491    }
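    // Illustrative use of PARSER above (equivalent to this class's static
    // parseFrom helpers; "bytes" is a hypothetical byte[] variable):
    //
    //   FsImageProto.StringTableSection s =
    //       FsImageProto.StringTableSection.PARSER.parseFrom(bytes);
    //   // same result as: FsImageProto.StringTableSection.parseFrom(bytes)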
18492
18493    public interface EntryOrBuilder
18494        extends com.google.protobuf.MessageOrBuilder {
18495
18496      // optional uint32 id = 1;
18497      /**
18498       * <code>optional uint32 id = 1;</code>
18499       */
18500      boolean hasId();
18501      /**
18502       * <code>optional uint32 id = 1;</code>
18503       */
18504      int getId();
18505
18506      // optional string str = 2;
18507      /**
18508       * <code>optional string str = 2;</code>
18509       */
18510      boolean hasStr();
18511      /**
18512       * <code>optional string str = 2;</code>
18513       */
18514      java.lang.String getStr();
18515      /**
18516       * <code>optional string str = 2;</code>
18517       */
18518      com.google.protobuf.ByteString
18519          getStrBytes();
18520    }
18521    /**
18522     * Protobuf type {@code hadoop.hdfs.fsimage.StringTableSection.Entry}
18523     */
18524    public static final class Entry extends
18525        com.google.protobuf.GeneratedMessage
18526        implements EntryOrBuilder {
18527      // Use Entry.newBuilder() to construct.
18528      private Entry(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
18529        super(builder);
18530        this.unknownFields = builder.getUnknownFields();
18531      }
18532      private Entry(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
18533
18534      private static final Entry defaultInstance;
18535      public static Entry getDefaultInstance() {
18536        return defaultInstance;
18537      }
18538
18539      public Entry getDefaultInstanceForType() {
18540        return defaultInstance;
18541      }
18542
18543      private final com.google.protobuf.UnknownFieldSet unknownFields;
18544      @java.lang.Override
18545      public final com.google.protobuf.UnknownFieldSet
18546          getUnknownFields() {
18547        return this.unknownFields;
18548      }
18549      private Entry(
18550          com.google.protobuf.CodedInputStream input,
18551          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
18552          throws com.google.protobuf.InvalidProtocolBufferException {
18553        initFields();
18554        int mutable_bitField0_ = 0;
18555        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
18556            com.google.protobuf.UnknownFieldSet.newBuilder();
18557        try {
18558          boolean done = false;
18559          while (!done) {
18560            int tag = input.readTag();
18561            switch (tag) {
18562              case 0:
18563                done = true;
18564                break;
18565              default: {
18566                if (!parseUnknownField(input, unknownFields,
18567                                       extensionRegistry, tag)) {
18568                  done = true;
18569                }
18570                break;
18571              }
18572              case 8: {
18573                bitField0_ |= 0x00000001;
18574                id_ = input.readUInt32();
18575                break;
18576              }
18577              case 18: {
18578                bitField0_ |= 0x00000002;
18579                str_ = input.readBytes();
18580                break;
18581              }
18582            }
18583          }
18584        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
18585          throw e.setUnfinishedMessage(this);
18586        } catch (java.io.IOException e) {
18587          throw new com.google.protobuf.InvalidProtocolBufferException(
18588              e.getMessage()).setUnfinishedMessage(this);
18589        } finally {
18590          this.unknownFields = unknownFields.build();
18591          makeExtensionsImmutable();
18592        }
18593      }
18594      public static final com.google.protobuf.Descriptors.Descriptor
18595          getDescriptor() {
18596        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_StringTableSection_Entry_descriptor;
18597      }
18598
18599      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
18600          internalGetFieldAccessorTable() {
18601        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_StringTableSection_Entry_fieldAccessorTable
18602            .ensureFieldAccessorsInitialized(
18603                org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.Entry.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.Entry.Builder.class);
18604      }
18605
18606      public static com.google.protobuf.Parser<Entry> PARSER =
18607          new com.google.protobuf.AbstractParser<Entry>() {
18608        public Entry parsePartialFrom(
18609            com.google.protobuf.CodedInputStream input,
18610            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
18611            throws com.google.protobuf.InvalidProtocolBufferException {
18612          return new Entry(input, extensionRegistry);
18613        }
18614      };
18615
18616      @java.lang.Override
18617      public com.google.protobuf.Parser<Entry> getParserForType() {
18618        return PARSER;
18619      }
18620
18621      private int bitField0_;
18622      // optional uint32 id = 1;
18623      public static final int ID_FIELD_NUMBER = 1;
18624      private int id_;
18625      /**
18626       * <code>optional uint32 id = 1;</code>
18627       */
18628      public boolean hasId() {
18629        return ((bitField0_ & 0x00000001) == 0x00000001);
18630      }
18631      /**
18632       * <code>optional uint32 id = 1;</code>
18633       */
18634      public int getId() {
18635        return id_;
18636      }
18637
18638      // optional string str = 2;
18639      public static final int STR_FIELD_NUMBER = 2;
18640      private java.lang.Object str_;
18641      /**
18642       * <code>optional string str = 2;</code>
18643       */
18644      public boolean hasStr() {
18645        return ((bitField0_ & 0x00000002) == 0x00000002);
18646      }
18647      /**
18648       * <code>optional string str = 2;</code>
18649       */
18650      public java.lang.String getStr() {
18651        java.lang.Object ref = str_;
18652        if (ref instanceof java.lang.String) {
18653          return (java.lang.String) ref;
18654        } else {
18655          com.google.protobuf.ByteString bs = 
18656              (com.google.protobuf.ByteString) ref;
18657          java.lang.String s = bs.toStringUtf8();
18658          if (bs.isValidUtf8()) {
18659            str_ = s;
18660          }
18661          return s;
18662        }
18663      }
18664      /**
18665       * <code>optional string str = 2;</code>
18666       */
18667      public com.google.protobuf.ByteString
18668          getStrBytes() {
18669        java.lang.Object ref = str_;
18670        if (ref instanceof java.lang.String) {
18671          com.google.protobuf.ByteString b = 
18672              com.google.protobuf.ByteString.copyFromUtf8(
18673                  (java.lang.String) ref);
18674          str_ = b;
18675          return b;
18676        } else {
18677          return (com.google.protobuf.ByteString) ref;
18678        }
18679      }
18680
18681      private void initFields() {
18682        id_ = 0;
18683        str_ = "";
18684      }
18685      private byte memoizedIsInitialized = -1;
18686      public final boolean isInitialized() {
18687        byte isInitialized = memoizedIsInitialized;
18688        if (isInitialized != -1) return isInitialized == 1;
18689
18690        memoizedIsInitialized = 1;
18691        return true;
18692      }
18693
18694      public void writeTo(com.google.protobuf.CodedOutputStream output)
18695                          throws java.io.IOException {
18696        getSerializedSize();
18697        if (((bitField0_ & 0x00000001) == 0x00000001)) {
18698          output.writeUInt32(1, id_);
18699        }
18700        if (((bitField0_ & 0x00000002) == 0x00000002)) {
18701          output.writeBytes(2, getStrBytes());
18702        }
18703        getUnknownFields().writeTo(output);
18704      }
18705
18706      private int memoizedSerializedSize = -1;
18707      public int getSerializedSize() {
18708        int size = memoizedSerializedSize;
18709        if (size != -1) return size;
18710
18711        size = 0;
18712        if (((bitField0_ & 0x00000001) == 0x00000001)) {
18713          size += com.google.protobuf.CodedOutputStream
18714            .computeUInt32Size(1, id_);
18715        }
18716        if (((bitField0_ & 0x00000002) == 0x00000002)) {
18717          size += com.google.protobuf.CodedOutputStream
18718            .computeBytesSize(2, getStrBytes());
18719        }
18720        size += getUnknownFields().getSerializedSize();
18721        memoizedSerializedSize = size;
18722        return size;
18723      }
18724
18725      private static final long serialVersionUID = 0L;
18726      @java.lang.Override
18727      protected java.lang.Object writeReplace()
18728          throws java.io.ObjectStreamException {
18729        return super.writeReplace();
18730      }
18731
18732      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.Entry parseFrom(
18733          com.google.protobuf.ByteString data)
18734          throws com.google.protobuf.InvalidProtocolBufferException {
18735        return PARSER.parseFrom(data);
18736      }
18737      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.Entry parseFrom(
18738          com.google.protobuf.ByteString data,
18739          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
18740          throws com.google.protobuf.InvalidProtocolBufferException {
18741        return PARSER.parseFrom(data, extensionRegistry);
18742      }
18743      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.Entry parseFrom(byte[] data)
18744          throws com.google.protobuf.InvalidProtocolBufferException {
18745        return PARSER.parseFrom(data);
18746      }
18747      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.Entry parseFrom(
18748          byte[] data,
18749          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
18750          throws com.google.protobuf.InvalidProtocolBufferException {
18751        return PARSER.parseFrom(data, extensionRegistry);
18752      }
18753      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.Entry parseFrom(java.io.InputStream input)
18754          throws java.io.IOException {
18755        return PARSER.parseFrom(input);
18756      }
18757      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.Entry parseFrom(
18758          java.io.InputStream input,
18759          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
18760          throws java.io.IOException {
18761        return PARSER.parseFrom(input, extensionRegistry);
18762      }
18763      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.Entry parseDelimitedFrom(java.io.InputStream input)
18764          throws java.io.IOException {
18765        return PARSER.parseDelimitedFrom(input);
18766      }
18767      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.Entry parseDelimitedFrom(
18768          java.io.InputStream input,
18769          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
18770          throws java.io.IOException {
18771        return PARSER.parseDelimitedFrom(input, extensionRegistry);
18772      }
18773      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.Entry parseFrom(
18774          com.google.protobuf.CodedInputStream input)
18775          throws java.io.IOException {
18776        return PARSER.parseFrom(input);
18777      }
18778      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.Entry parseFrom(
18779          com.google.protobuf.CodedInputStream input,
18780          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
18781          throws java.io.IOException {
18782        return PARSER.parseFrom(input, extensionRegistry);
18783      }
18784
18785      public static Builder newBuilder() { return Builder.create(); }
18786      public Builder newBuilderForType() { return newBuilder(); }
18787      public static Builder newBuilder(org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.Entry prototype) {
18788        return newBuilder().mergeFrom(prototype);
18789      }
18790      public Builder toBuilder() { return newBuilder(this); }
18791
18792      @java.lang.Override
18793      protected Builder newBuilderForType(
18794          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
18795        Builder builder = new Builder(parent);
18796        return builder;
18797      }
18798      /**
18799       * Protobuf type {@code hadoop.hdfs.fsimage.StringTableSection.Entry}
18800       */
18801      public static final class Builder extends
18802          com.google.protobuf.GeneratedMessage.Builder<Builder>
18803         implements org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.EntryOrBuilder {
18804        public static final com.google.protobuf.Descriptors.Descriptor
18805            getDescriptor() {
18806          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_StringTableSection_Entry_descriptor;
18807        }
18808
18809        protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
18810            internalGetFieldAccessorTable() {
18811          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_StringTableSection_Entry_fieldAccessorTable
18812              .ensureFieldAccessorsInitialized(
18813                  org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.Entry.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.Entry.Builder.class);
18814        }
18815
18816        // Construct using org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.Entry.newBuilder()
18817        private Builder() {
18818          maybeForceBuilderInitialization();
18819        }
18820
18821        private Builder(
18822            com.google.protobuf.GeneratedMessage.BuilderParent parent) {
18823          super(parent);
18824          maybeForceBuilderInitialization();
18825        }
18826        private void maybeForceBuilderInitialization() {
18827          if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
18828          }
18829        }
18830        private static Builder create() {
18831          return new Builder();
18832        }
18833
18834        public Builder clear() {
18835          super.clear();
18836          id_ = 0;
18837          bitField0_ = (bitField0_ & ~0x00000001);
18838          str_ = "";
18839          bitField0_ = (bitField0_ & ~0x00000002);
18840          return this;
18841        }
18842
18843        public Builder clone() {
18844          return create().mergeFrom(buildPartial());
18845        }
18846
18847        public com.google.protobuf.Descriptors.Descriptor
18848            getDescriptorForType() {
18849          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_StringTableSection_Entry_descriptor;
18850        }
18851
18852        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.Entry getDefaultInstanceForType() {
18853          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.Entry.getDefaultInstance();
18854        }
18855
18856        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.Entry build() {
18857          org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.Entry result = buildPartial();
18858          if (!result.isInitialized()) {
18859            throw newUninitializedMessageException(result);
18860          }
18861          return result;
18862        }
18863
18864        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.Entry buildPartial() {
18865          org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.Entry result = new org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.Entry(this);
18866          int from_bitField0_ = bitField0_;
18867          int to_bitField0_ = 0;
18868          if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
18869            to_bitField0_ |= 0x00000001;
18870          }
18871          result.id_ = id_;
18872          if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
18873            to_bitField0_ |= 0x00000002;
18874          }
18875          result.str_ = str_;
18876          result.bitField0_ = to_bitField0_;
18877          onBuilt();
18878          return result;
18879        }
18880
18881        public Builder mergeFrom(com.google.protobuf.Message other) {
18882          if (other instanceof org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.Entry) {
18883            return mergeFrom((org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.Entry)other);
18884          } else {
18885            super.mergeFrom(other);
18886            return this;
18887          }
18888        }
18889
18890        public Builder mergeFrom(org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.Entry other) {
18891          if (other == org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.Entry.getDefaultInstance()) return this;
18892          if (other.hasId()) {
18893            setId(other.getId());
18894          }
18895          if (other.hasStr()) {
18896            bitField0_ |= 0x00000002;
18897            str_ = other.str_;
18898            onChanged();
18899          }
18900          this.mergeUnknownFields(other.getUnknownFields());
18901          return this;
18902        }
18903
18904        public final boolean isInitialized() {
18905          return true;
18906        }
18907
18908        public Builder mergeFrom(
18909            com.google.protobuf.CodedInputStream input,
18910            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
18911            throws java.io.IOException {
18912          org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.Entry parsedMessage = null;
18913          try {
18914            parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
18915          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
18916            parsedMessage = (org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.Entry) e.getUnfinishedMessage();
18917            throw e;
18918          } finally {
18919            if (parsedMessage != null) {
18920              mergeFrom(parsedMessage);
18921            }
18922          }
18923          return this;
18924        }
18925        private int bitField0_;
18926
18927        // optional uint32 id = 1;
18928        private int id_ ;
18929        /**
18930         * <code>optional uint32 id = 1;</code>
18931         */
18932        public boolean hasId() {
18933          return ((bitField0_ & 0x00000001) == 0x00000001);
18934        }
18935        /**
18936         * <code>optional uint32 id = 1;</code>
18937         */
18938        public int getId() {
18939          return id_;
18940        }
18941        /**
18942         * <code>optional uint32 id = 1;</code>
18943         */
18944        public Builder setId(int value) {
18945          bitField0_ |= 0x00000001;
18946          id_ = value;
18947          onChanged();
18948          return this;
18949        }
18950        /**
18951         * <code>optional uint32 id = 1;</code>
18952         */
18953        public Builder clearId() {
18954          bitField0_ = (bitField0_ & ~0x00000001);
18955          id_ = 0;
18956          onChanged();
18957          return this;
18958        }
18959
18960        // optional string str = 2;
18961        private java.lang.Object str_ = "";
18962        /**
18963         * <code>optional string str = 2;</code>
18964         */
18965        public boolean hasStr() {
18966          return ((bitField0_ & 0x00000002) == 0x00000002);
18967        }
18968        /**
18969         * <code>optional string str = 2;</code>
18970         */
18971        public java.lang.String getStr() {
18972          java.lang.Object ref = str_;
18973          if (!(ref instanceof java.lang.String)) {
18974            java.lang.String s = ((com.google.protobuf.ByteString) ref)
18975                .toStringUtf8();
18976            str_ = s;
18977            return s;
18978          } else {
18979            return (java.lang.String) ref;
18980          }
18981        }
18982        /**
18983         * <code>optional string str = 2;</code>
18984         */
18985        public com.google.protobuf.ByteString
18986            getStrBytes() {
18987          java.lang.Object ref = str_;
          if (ref instanceof java.lang.String) {
18989            com.google.protobuf.ByteString b = 
18990                com.google.protobuf.ByteString.copyFromUtf8(
18991                    (java.lang.String) ref);
18992            str_ = b;
18993            return b;
18994          } else {
18995            return (com.google.protobuf.ByteString) ref;
18996          }
18997        }
18998        /**
18999         * <code>optional string str = 2;</code>
19000         */
19001        public Builder setStr(
19002            java.lang.String value) {
          if (value == null) {
            throw new NullPointerException();
          }
          bitField0_ |= 0x00000002;
19007          str_ = value;
19008          onChanged();
19009          return this;
19010        }
19011        /**
19012         * <code>optional string str = 2;</code>
19013         */
19014        public Builder clearStr() {
19015          bitField0_ = (bitField0_ & ~0x00000002);
19016          str_ = getDefaultInstance().getStr();
19017          onChanged();
19018          return this;
19019        }
19020        /**
19021         * <code>optional string str = 2;</code>
19022         */
19023        public Builder setStrBytes(
19024            com.google.protobuf.ByteString value) {
          if (value == null) {
            throw new NullPointerException();
          }
          bitField0_ |= 0x00000002;
19029          str_ = value;
19030          onChanged();
19031          return this;
19032        }
19033
19034        // @@protoc_insertion_point(builder_scope:hadoop.hdfs.fsimage.StringTableSection.Entry)
19035      }
19036
19037      static {
19038        defaultInstance = new Entry(true);
19039        defaultInstance.initFields();
19040      }
19041
19042      // @@protoc_insertion_point(class_scope:hadoop.hdfs.fsimage.StringTableSection.Entry)
19043    }
19044
19045    private int bitField0_;
19046    // optional uint32 numEntry = 1;
19047    public static final int NUMENTRY_FIELD_NUMBER = 1;
19048    private int numEntry_;
19049    /**
19050     * <code>optional uint32 numEntry = 1;</code>
19051     *
19052     * <pre>
19053     * repeated Entry
19054     * </pre>
19055     */
19056    public boolean hasNumEntry() {
19057      return ((bitField0_ & 0x00000001) == 0x00000001);
19058    }
19059    /**
19060     * <code>optional uint32 numEntry = 1;</code>
19061     *
19062     * <pre>
19063     * repeated Entry
19064     * </pre>
19065     */
19066    public int getNumEntry() {
19067      return numEntry_;
19068    }
19069
19070    private void initFields() {
19071      numEntry_ = 0;
19072    }
19073    private byte memoizedIsInitialized = -1;
19074    public final boolean isInitialized() {
19075      byte isInitialized = memoizedIsInitialized;
19076      if (isInitialized != -1) return isInitialized == 1;
19077
19078      memoizedIsInitialized = 1;
19079      return true;
19080    }
19081
19082    public void writeTo(com.google.protobuf.CodedOutputStream output)
19083                        throws java.io.IOException {
19084      getSerializedSize();
19085      if (((bitField0_ & 0x00000001) == 0x00000001)) {
19086        output.writeUInt32(1, numEntry_);
19087      }
19088      getUnknownFields().writeTo(output);
19089    }
19090
19091    private int memoizedSerializedSize = -1;
19092    public int getSerializedSize() {
19093      int size = memoizedSerializedSize;
19094      if (size != -1) return size;
19095
19096      size = 0;
19097      if (((bitField0_ & 0x00000001) == 0x00000001)) {
19098        size += com.google.protobuf.CodedOutputStream
19099          .computeUInt32Size(1, numEntry_);
19100      }
19101      size += getUnknownFields().getSerializedSize();
19102      memoizedSerializedSize = size;
19103      return size;
19104    }
19105
19106    private static final long serialVersionUID = 0L;
19107    @java.lang.Override
19108    protected java.lang.Object writeReplace()
19109        throws java.io.ObjectStreamException {
19110      return super.writeReplace();
19111    }
19112
19113    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection parseFrom(
19114        com.google.protobuf.ByteString data)
19115        throws com.google.protobuf.InvalidProtocolBufferException {
19116      return PARSER.parseFrom(data);
19117    }
19118    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection parseFrom(
19119        com.google.protobuf.ByteString data,
19120        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
19121        throws com.google.protobuf.InvalidProtocolBufferException {
19122      return PARSER.parseFrom(data, extensionRegistry);
19123    }
19124    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection parseFrom(byte[] data)
19125        throws com.google.protobuf.InvalidProtocolBufferException {
19126      return PARSER.parseFrom(data);
19127    }
19128    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection parseFrom(
19129        byte[] data,
19130        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
19131        throws com.google.protobuf.InvalidProtocolBufferException {
19132      return PARSER.parseFrom(data, extensionRegistry);
19133    }
19134    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection parseFrom(java.io.InputStream input)
19135        throws java.io.IOException {
19136      return PARSER.parseFrom(input);
19137    }
19138    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection parseFrom(
19139        java.io.InputStream input,
19140        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
19141        throws java.io.IOException {
19142      return PARSER.parseFrom(input, extensionRegistry);
19143    }
19144    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection parseDelimitedFrom(java.io.InputStream input)
19145        throws java.io.IOException {
19146      return PARSER.parseDelimitedFrom(input);
19147    }
19148    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection parseDelimitedFrom(
19149        java.io.InputStream input,
19150        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
19151        throws java.io.IOException {
19152      return PARSER.parseDelimitedFrom(input, extensionRegistry);
19153    }
19154    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection parseFrom(
19155        com.google.protobuf.CodedInputStream input)
19156        throws java.io.IOException {
19157      return PARSER.parseFrom(input);
19158    }
19159    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection parseFrom(
19160        com.google.protobuf.CodedInputStream input,
19161        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
19162        throws java.io.IOException {
19163      return PARSER.parseFrom(input, extensionRegistry);
19164    }
19165
19166    public static Builder newBuilder() { return Builder.create(); }
19167    public Builder newBuilderForType() { return newBuilder(); }
19168    public static Builder newBuilder(org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection prototype) {
19169      return newBuilder().mergeFrom(prototype);
19170    }
19171    public Builder toBuilder() { return newBuilder(this); }
19172
19173    @java.lang.Override
19174    protected Builder newBuilderForType(
19175        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
19176      Builder builder = new Builder(parent);
19177      return builder;
19178    }
19179    /**
19180     * Protobuf type {@code hadoop.hdfs.fsimage.StringTableSection}
19181     *
19182     * <pre>
19183     **
19184     * This section maps string to id
19185     * NAME: STRING_TABLE
19186     * </pre>
19187     */
19188    public static final class Builder extends
19189        com.google.protobuf.GeneratedMessage.Builder<Builder>
19190       implements org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSectionOrBuilder {
19191      public static final com.google.protobuf.Descriptors.Descriptor
19192          getDescriptor() {
19193        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_StringTableSection_descriptor;
19194      }
19195
19196      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
19197          internalGetFieldAccessorTable() {
19198        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_StringTableSection_fieldAccessorTable
19199            .ensureFieldAccessorsInitialized(
19200                org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.Builder.class);
19201      }
19202
19203      // Construct using org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.newBuilder()
19204      private Builder() {
19205        maybeForceBuilderInitialization();
19206      }
19207
19208      private Builder(
19209          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
19210        super(parent);
19211        maybeForceBuilderInitialization();
19212      }
19213      private void maybeForceBuilderInitialization() {
19214        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
19215        }
19216      }
19217      private static Builder create() {
19218        return new Builder();
19219      }
19220
19221      public Builder clear() {
19222        super.clear();
19223        numEntry_ = 0;
19224        bitField0_ = (bitField0_ & ~0x00000001);
19225        return this;
19226      }
19227
19228      public Builder clone() {
19229        return create().mergeFrom(buildPartial());
19230      }
19231
19232      public com.google.protobuf.Descriptors.Descriptor
19233          getDescriptorForType() {
19234        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_StringTableSection_descriptor;
19235      }
19236
19237      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection getDefaultInstanceForType() {
19238        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.getDefaultInstance();
19239      }
19240
19241      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection build() {
19242        org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection result = buildPartial();
19243        if (!result.isInitialized()) {
19244          throw newUninitializedMessageException(result);
19245        }
19246        return result;
19247      }
19248
19249      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection buildPartial() {
19250        org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection result = new org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection(this);
19251        int from_bitField0_ = bitField0_;
19252        int to_bitField0_ = 0;
19253        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
19254          to_bitField0_ |= 0x00000001;
19255        }
19256        result.numEntry_ = numEntry_;
19257        result.bitField0_ = to_bitField0_;
19258        onBuilt();
19259        return result;
19260      }
19261
19262      public Builder mergeFrom(com.google.protobuf.Message other) {
19263        if (other instanceof org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection) {
19264          return mergeFrom((org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection)other);
19265        } else {
19266          super.mergeFrom(other);
19267          return this;
19268        }
19269      }
19270
19271      public Builder mergeFrom(org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection other) {
19272        if (other == org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection.getDefaultInstance()) return this;
19273        if (other.hasNumEntry()) {
19274          setNumEntry(other.getNumEntry());
19275        }
19276        this.mergeUnknownFields(other.getUnknownFields());
19277        return this;
19278      }
19279
19280      public final boolean isInitialized() {
19281        return true;
19282      }
19283
19284      public Builder mergeFrom(
19285          com.google.protobuf.CodedInputStream input,
19286          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
19287          throws java.io.IOException {
19288        org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection parsedMessage = null;
19289        try {
19290          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
19291        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
19292          parsedMessage = (org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection) e.getUnfinishedMessage();
19293          throw e;
19294        } finally {
19295          if (parsedMessage != null) {
19296            mergeFrom(parsedMessage);
19297          }
19298        }
19299        return this;
19300      }
19301      private int bitField0_;
19302
19303      // optional uint32 numEntry = 1;
19304      private int numEntry_ ;
19305      /**
19306       * <code>optional uint32 numEntry = 1;</code>
19307       *
19308       * <pre>
19309       * repeated Entry
19310       * </pre>
19311       */
19312      public boolean hasNumEntry() {
19313        return ((bitField0_ & 0x00000001) == 0x00000001);
19314      }
19315      /**
19316       * <code>optional uint32 numEntry = 1;</code>
19317       *
19318       * <pre>
19319       * repeated Entry
19320       * </pre>
19321       */
19322      public int getNumEntry() {
19323        return numEntry_;
19324      }
19325      /**
19326       * <code>optional uint32 numEntry = 1;</code>
19327       *
19328       * <pre>
19329       * repeated Entry
19330       * </pre>
19331       */
19332      public Builder setNumEntry(int value) {
19333        bitField0_ |= 0x00000001;
19334        numEntry_ = value;
19335        onChanged();
19336        return this;
19337      }
19338      /**
19339       * <code>optional uint32 numEntry = 1;</code>
19340       *
19341       * <pre>
19342       * repeated Entry
19343       * </pre>
19344       */
19345      public Builder clearNumEntry() {
19346        bitField0_ = (bitField0_ & ~0x00000001);
19347        numEntry_ = 0;
19348        onChanged();
19349        return this;
19350      }
19351
19352      // @@protoc_insertion_point(builder_scope:hadoop.hdfs.fsimage.StringTableSection)
19353    }
19354
19355    static {
19356      defaultInstance = new StringTableSection(true);
19357      defaultInstance.initFields();
19358    }
19359
19360    // @@protoc_insertion_point(class_scope:hadoop.hdfs.fsimage.StringTableSection)
19361  }
19362
19363  public interface SecretManagerSectionOrBuilder
19364      extends com.google.protobuf.MessageOrBuilder {
19365
19366    // optional uint32 currentId = 1;
19367    /**
19368     * <code>optional uint32 currentId = 1;</code>
19369     */
19370    boolean hasCurrentId();
19371    /**
19372     * <code>optional uint32 currentId = 1;</code>
19373     */
19374    int getCurrentId();
19375
19376    // optional uint32 tokenSequenceNumber = 2;
19377    /**
19378     * <code>optional uint32 tokenSequenceNumber = 2;</code>
19379     */
19380    boolean hasTokenSequenceNumber();
19381    /**
19382     * <code>optional uint32 tokenSequenceNumber = 2;</code>
19383     */
19384    int getTokenSequenceNumber();
19385
19386    // optional uint32 numKeys = 3;
19387    /**
19388     * <code>optional uint32 numKeys = 3;</code>
19389     */
19390    boolean hasNumKeys();
19391    /**
19392     * <code>optional uint32 numKeys = 3;</code>
19393     */
19394    int getNumKeys();
19395
19396    // optional uint32 numTokens = 4;
19397    /**
19398     * <code>optional uint32 numTokens = 4;</code>
19399     *
19400     * <pre>
19401     * repeated DelegationKey keys
19402     * repeated PersistToken tokens
19403     * </pre>
19404     */
19405    boolean hasNumTokens();
19406    /**
19407     * <code>optional uint32 numTokens = 4;</code>
19408     *
19409     * <pre>
19410     * repeated DelegationKey keys
19411     * repeated PersistToken tokens
19412     * </pre>
19413     */
19414    int getNumTokens();
19415  }
19416  /**
19417   * Protobuf type {@code hadoop.hdfs.fsimage.SecretManagerSection}
19418   */
19419  public static final class SecretManagerSection extends
19420      com.google.protobuf.GeneratedMessage
19421      implements SecretManagerSectionOrBuilder {
19422    // Use SecretManagerSection.newBuilder() to construct.
19423    private SecretManagerSection(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
19424      super(builder);
19425      this.unknownFields = builder.getUnknownFields();
19426    }
19427    private SecretManagerSection(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
19428
19429    private static final SecretManagerSection defaultInstance;
19430    public static SecretManagerSection getDefaultInstance() {
19431      return defaultInstance;
19432    }
19433
19434    public SecretManagerSection getDefaultInstanceForType() {
19435      return defaultInstance;
19436    }
19437
19438    private final com.google.protobuf.UnknownFieldSet unknownFields;
19439    @java.lang.Override
19440    public final com.google.protobuf.UnknownFieldSet
19441        getUnknownFields() {
19442      return this.unknownFields;
19443    }
19444    private SecretManagerSection(
19445        com.google.protobuf.CodedInputStream input,
19446        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
19447        throws com.google.protobuf.InvalidProtocolBufferException {
19448      initFields();
19449      int mutable_bitField0_ = 0;
19450      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
19451          com.google.protobuf.UnknownFieldSet.newBuilder();
19452      try {
19453        boolean done = false;
19454        while (!done) {
19455          int tag = input.readTag();
19456          switch (tag) {
19457            case 0:
19458              done = true;
19459              break;
19460            default: {
19461              if (!parseUnknownField(input, unknownFields,
19462                                     extensionRegistry, tag)) {
19463                done = true;
19464              }
19465              break;
19466            }
19467            case 8: {
19468              bitField0_ |= 0x00000001;
19469              currentId_ = input.readUInt32();
19470              break;
19471            }
19472            case 16: {
19473              bitField0_ |= 0x00000002;
19474              tokenSequenceNumber_ = input.readUInt32();
19475              break;
19476            }
19477            case 24: {
19478              bitField0_ |= 0x00000004;
19479              numKeys_ = input.readUInt32();
19480              break;
19481            }
19482            case 32: {
19483              bitField0_ |= 0x00000008;
19484              numTokens_ = input.readUInt32();
19485              break;
19486            }
19487          }
19488        }
19489      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
19490        throw e.setUnfinishedMessage(this);
19491      } catch (java.io.IOException e) {
19492        throw new com.google.protobuf.InvalidProtocolBufferException(
19493            e.getMessage()).setUnfinishedMessage(this);
19494      } finally {
19495        this.unknownFields = unknownFields.build();
19496        makeExtensionsImmutable();
19497      }
19498    }
19499    public static final com.google.protobuf.Descriptors.Descriptor
19500        getDescriptor() {
19501      return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SecretManagerSection_descriptor;
19502    }
19503
19504    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
19505        internalGetFieldAccessorTable() {
19506      return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SecretManagerSection_fieldAccessorTable
19507          .ensureFieldAccessorsInitialized(
19508              org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.Builder.class);
19509    }
19510
19511    public static com.google.protobuf.Parser<SecretManagerSection> PARSER =
19512        new com.google.protobuf.AbstractParser<SecretManagerSection>() {
19513      public SecretManagerSection parsePartialFrom(
19514          com.google.protobuf.CodedInputStream input,
19515          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
19516          throws com.google.protobuf.InvalidProtocolBufferException {
19517        return new SecretManagerSection(input, extensionRegistry);
19518      }
19519    };
19520
19521    @java.lang.Override
19522    public com.google.protobuf.Parser<SecretManagerSection> getParserForType() {
19523      return PARSER;
19524    }
19525
19526    public interface DelegationKeyOrBuilder
19527        extends com.google.protobuf.MessageOrBuilder {
19528
19529      // optional uint32 id = 1;
19530      /**
19531       * <code>optional uint32 id = 1;</code>
19532       */
19533      boolean hasId();
19534      /**
19535       * <code>optional uint32 id = 1;</code>
19536       */
19537      int getId();
19538
19539      // optional uint64 expiryDate = 2;
19540      /**
19541       * <code>optional uint64 expiryDate = 2;</code>
19542       */
19543      boolean hasExpiryDate();
19544      /**
19545       * <code>optional uint64 expiryDate = 2;</code>
19546       */
19547      long getExpiryDate();
19548
19549      // optional bytes key = 3;
19550      /**
19551       * <code>optional bytes key = 3;</code>
19552       */
19553      boolean hasKey();
19554      /**
19555       * <code>optional bytes key = 3;</code>
19556       */
19557      com.google.protobuf.ByteString getKey();
19558    }
19559    /**
19560     * Protobuf type {@code hadoop.hdfs.fsimage.SecretManagerSection.DelegationKey}
19561     */
19562    public static final class DelegationKey extends
19563        com.google.protobuf.GeneratedMessage
19564        implements DelegationKeyOrBuilder {
19565      // Use DelegationKey.newBuilder() to construct.
19566      private DelegationKey(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
19567        super(builder);
19568        this.unknownFields = builder.getUnknownFields();
19569      }
19570      private DelegationKey(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
19571
19572      private static final DelegationKey defaultInstance;
19573      public static DelegationKey getDefaultInstance() {
19574        return defaultInstance;
19575      }
19576
19577      public DelegationKey getDefaultInstanceForType() {
19578        return defaultInstance;
19579      }
19580
19581      private final com.google.protobuf.UnknownFieldSet unknownFields;
19582      @java.lang.Override
19583      public final com.google.protobuf.UnknownFieldSet
19584          getUnknownFields() {
19585        return this.unknownFields;
19586      }
19587      private DelegationKey(
19588          com.google.protobuf.CodedInputStream input,
19589          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
19590          throws com.google.protobuf.InvalidProtocolBufferException {
19591        initFields();
19592        int mutable_bitField0_ = 0;
19593        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
19594            com.google.protobuf.UnknownFieldSet.newBuilder();
19595        try {
19596          boolean done = false;
19597          while (!done) {
19598            int tag = input.readTag();
19599            switch (tag) {
19600              case 0:
19601                done = true;
19602                break;
19603              default: {
19604                if (!parseUnknownField(input, unknownFields,
19605                                       extensionRegistry, tag)) {
19606                  done = true;
19607                }
19608                break;
19609              }
19610              case 8: {
19611                bitField0_ |= 0x00000001;
19612                id_ = input.readUInt32();
19613                break;
19614              }
19615              case 16: {
19616                bitField0_ |= 0x00000002;
19617                expiryDate_ = input.readUInt64();
19618                break;
19619              }
19620              case 26: {
19621                bitField0_ |= 0x00000004;
19622                key_ = input.readBytes();
19623                break;
19624              }
19625            }
19626          }
19627        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
19628          throw e.setUnfinishedMessage(this);
19629        } catch (java.io.IOException e) {
19630          throw new com.google.protobuf.InvalidProtocolBufferException(
19631              e.getMessage()).setUnfinishedMessage(this);
19632        } finally {
19633          this.unknownFields = unknownFields.build();
19634          makeExtensionsImmutable();
19635        }
19636      }
19637      public static final com.google.protobuf.Descriptors.Descriptor
19638          getDescriptor() {
19639        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SecretManagerSection_DelegationKey_descriptor;
19640      }
19641
19642      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
19643          internalGetFieldAccessorTable() {
19644        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SecretManagerSection_DelegationKey_fieldAccessorTable
19645            .ensureFieldAccessorsInitialized(
19646                org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.DelegationKey.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.DelegationKey.Builder.class);
19647      }
19648
19649      public static com.google.protobuf.Parser<DelegationKey> PARSER =
19650          new com.google.protobuf.AbstractParser<DelegationKey>() {
19651        public DelegationKey parsePartialFrom(
19652            com.google.protobuf.CodedInputStream input,
19653            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
19654            throws com.google.protobuf.InvalidProtocolBufferException {
19655          return new DelegationKey(input, extensionRegistry);
19656        }
19657      };
19658
19659      @java.lang.Override
19660      public com.google.protobuf.Parser<DelegationKey> getParserForType() {
19661        return PARSER;
19662      }
19663
19664      private int bitField0_;
19665      // optional uint32 id = 1;
19666      public static final int ID_FIELD_NUMBER = 1;
19667      private int id_;
19668      /**
19669       * <code>optional uint32 id = 1;</code>
19670       */
19671      public boolean hasId() {
19672        return ((bitField0_ & 0x00000001) == 0x00000001);
19673      }
19674      /**
19675       * <code>optional uint32 id = 1;</code>
19676       */
19677      public int getId() {
19678        return id_;
19679      }
19680
19681      // optional uint64 expiryDate = 2;
19682      public static final int EXPIRYDATE_FIELD_NUMBER = 2;
19683      private long expiryDate_;
19684      /**
19685       * <code>optional uint64 expiryDate = 2;</code>
19686       */
19687      public boolean hasExpiryDate() {
19688        return ((bitField0_ & 0x00000002) == 0x00000002);
19689      }
19690      /**
19691       * <code>optional uint64 expiryDate = 2;</code>
19692       */
19693      public long getExpiryDate() {
19694        return expiryDate_;
19695      }
19696
19697      // optional bytes key = 3;
19698      public static final int KEY_FIELD_NUMBER = 3;
19699      private com.google.protobuf.ByteString key_;
19700      /**
19701       * <code>optional bytes key = 3;</code>
19702       */
19703      public boolean hasKey() {
19704        return ((bitField0_ & 0x00000004) == 0x00000004);
19705      }
19706      /**
19707       * <code>optional bytes key = 3;</code>
19708       */
19709      public com.google.protobuf.ByteString getKey() {
19710        return key_;
19711      }
19712
19713      private void initFields() {
19714        id_ = 0;
19715        expiryDate_ = 0L;
19716        key_ = com.google.protobuf.ByteString.EMPTY;
19717      }
19718      private byte memoizedIsInitialized = -1;
19719      public final boolean isInitialized() {
19720        byte isInitialized = memoizedIsInitialized;
19721        if (isInitialized != -1) return isInitialized == 1;
19722
19723        memoizedIsInitialized = 1;
19724        return true;
19725      }
19726
19727      public void writeTo(com.google.protobuf.CodedOutputStream output)
19728                          throws java.io.IOException {
19729        getSerializedSize();
19730        if (((bitField0_ & 0x00000001) == 0x00000001)) {
19731          output.writeUInt32(1, id_);
19732        }
19733        if (((bitField0_ & 0x00000002) == 0x00000002)) {
19734          output.writeUInt64(2, expiryDate_);
19735        }
19736        if (((bitField0_ & 0x00000004) == 0x00000004)) {
19737          output.writeBytes(3, key_);
19738        }
19739        getUnknownFields().writeTo(output);
19740      }
19741
19742      private int memoizedSerializedSize = -1;
19743      public int getSerializedSize() {
19744        int size = memoizedSerializedSize;
19745        if (size != -1) return size;
19746
19747        size = 0;
19748        if (((bitField0_ & 0x00000001) == 0x00000001)) {
19749          size += com.google.protobuf.CodedOutputStream
19750            .computeUInt32Size(1, id_);
19751        }
19752        if (((bitField0_ & 0x00000002) == 0x00000002)) {
19753          size += com.google.protobuf.CodedOutputStream
19754            .computeUInt64Size(2, expiryDate_);
19755        }
19756        if (((bitField0_ & 0x00000004) == 0x00000004)) {
19757          size += com.google.protobuf.CodedOutputStream
19758            .computeBytesSize(3, key_);
19759        }
19760        size += getUnknownFields().getSerializedSize();
19761        memoizedSerializedSize = size;
19762        return size;
19763      }
19764
19765      private static final long serialVersionUID = 0L;
19766      @java.lang.Override
19767      protected java.lang.Object writeReplace()
19768          throws java.io.ObjectStreamException {
19769        return super.writeReplace();
19770      }
19771
19772      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.DelegationKey parseFrom(
19773          com.google.protobuf.ByteString data)
19774          throws com.google.protobuf.InvalidProtocolBufferException {
19775        return PARSER.parseFrom(data);
19776      }
19777      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.DelegationKey parseFrom(
19778          com.google.protobuf.ByteString data,
19779          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
19780          throws com.google.protobuf.InvalidProtocolBufferException {
19781        return PARSER.parseFrom(data, extensionRegistry);
19782      }
19783      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.DelegationKey parseFrom(byte[] data)
19784          throws com.google.protobuf.InvalidProtocolBufferException {
19785        return PARSER.parseFrom(data);
19786      }
19787      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.DelegationKey parseFrom(
19788          byte[] data,
19789          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
19790          throws com.google.protobuf.InvalidProtocolBufferException {
19791        return PARSER.parseFrom(data, extensionRegistry);
19792      }
19793      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.DelegationKey parseFrom(java.io.InputStream input)
19794          throws java.io.IOException {
19795        return PARSER.parseFrom(input);
19796      }
19797      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.DelegationKey parseFrom(
19798          java.io.InputStream input,
19799          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
19800          throws java.io.IOException {
19801        return PARSER.parseFrom(input, extensionRegistry);
19802      }
19803      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.DelegationKey parseDelimitedFrom(java.io.InputStream input)
19804          throws java.io.IOException {
19805        return PARSER.parseDelimitedFrom(input);
19806      }
19807      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.DelegationKey parseDelimitedFrom(
19808          java.io.InputStream input,
19809          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
19810          throws java.io.IOException {
19811        return PARSER.parseDelimitedFrom(input, extensionRegistry);
19812      }
19813      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.DelegationKey parseFrom(
19814          com.google.protobuf.CodedInputStream input)
19815          throws java.io.IOException {
19816        return PARSER.parseFrom(input);
19817      }
19818      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.DelegationKey parseFrom(
19819          com.google.protobuf.CodedInputStream input,
19820          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
19821          throws java.io.IOException {
19822        return PARSER.parseFrom(input, extensionRegistry);
19823      }
19824
19825      public static Builder newBuilder() { return Builder.create(); }
19826      public Builder newBuilderForType() { return newBuilder(); }
19827      public static Builder newBuilder(org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.DelegationKey prototype) {
19828        return newBuilder().mergeFrom(prototype);
19829      }
19830      public Builder toBuilder() { return newBuilder(this); }
19831
19832      @java.lang.Override
19833      protected Builder newBuilderForType(
19834          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
19835        Builder builder = new Builder(parent);
19836        return builder;
19837      }
19838      /**
19839       * Protobuf type {@code hadoop.hdfs.fsimage.SecretManagerSection.DelegationKey}
19840       */
19841      public static final class Builder extends
19842          com.google.protobuf.GeneratedMessage.Builder<Builder>
19843         implements org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.DelegationKeyOrBuilder {
19844        public static final com.google.protobuf.Descriptors.Descriptor
19845            getDescriptor() {
19846          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SecretManagerSection_DelegationKey_descriptor;
19847        }
19848
19849        protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
19850            internalGetFieldAccessorTable() {
19851          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SecretManagerSection_DelegationKey_fieldAccessorTable
19852              .ensureFieldAccessorsInitialized(
19853                  org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.DelegationKey.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.DelegationKey.Builder.class);
19854        }
19855
19856        // Construct using org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.DelegationKey.newBuilder()
19857        private Builder() {
19858          maybeForceBuilderInitialization();
19859        }
19860
19861        private Builder(
19862            com.google.protobuf.GeneratedMessage.BuilderParent parent) {
19863          super(parent);
19864          maybeForceBuilderInitialization();
19865        }
19866        private void maybeForceBuilderInitialization() {
19867          if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
19868          }
19869        }
19870        private static Builder create() {
19871          return new Builder();
19872        }
19873
19874        public Builder clear() {
19875          super.clear();
19876          id_ = 0;
19877          bitField0_ = (bitField0_ & ~0x00000001);
19878          expiryDate_ = 0L;
19879          bitField0_ = (bitField0_ & ~0x00000002);
19880          key_ = com.google.protobuf.ByteString.EMPTY;
19881          bitField0_ = (bitField0_ & ~0x00000004);
19882          return this;
19883        }
19884
19885        public Builder clone() {
19886          return create().mergeFrom(buildPartial());
19887        }
19888
19889        public com.google.protobuf.Descriptors.Descriptor
19890            getDescriptorForType() {
19891          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SecretManagerSection_DelegationKey_descriptor;
19892        }
19893
19894        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.DelegationKey getDefaultInstanceForType() {
19895          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.DelegationKey.getDefaultInstance();
19896        }
19897
19898        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.DelegationKey build() {
19899          org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.DelegationKey result = buildPartial();
19900          if (!result.isInitialized()) {
19901            throw newUninitializedMessageException(result);
19902          }
19903          return result;
19904        }
19905
19906        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.DelegationKey buildPartial() {
19907          org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.DelegationKey result = new org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.DelegationKey(this);
19908          int from_bitField0_ = bitField0_;
19909          int to_bitField0_ = 0;
19910          if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
19911            to_bitField0_ |= 0x00000001;
19912          }
19913          result.id_ = id_;
19914          if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
19915            to_bitField0_ |= 0x00000002;
19916          }
19917          result.expiryDate_ = expiryDate_;
19918          if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
19919            to_bitField0_ |= 0x00000004;
19920          }
19921          result.key_ = key_;
19922          result.bitField0_ = to_bitField0_;
19923          onBuilt();
19924          return result;
19925        }
19926
19927        public Builder mergeFrom(com.google.protobuf.Message other) {
19928          if (other instanceof org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.DelegationKey) {
19929            return mergeFrom((org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.DelegationKey)other);
19930          } else {
19931            super.mergeFrom(other);
19932            return this;
19933          }
19934        }
19935
19936        public Builder mergeFrom(org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.DelegationKey other) {
19937          if (other == org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.DelegationKey.getDefaultInstance()) return this;
19938          if (other.hasId()) {
19939            setId(other.getId());
19940          }
19941          if (other.hasExpiryDate()) {
19942            setExpiryDate(other.getExpiryDate());
19943          }
19944          if (other.hasKey()) {
19945            setKey(other.getKey());
19946          }
19947          this.mergeUnknownFields(other.getUnknownFields());
19948          return this;
19949        }
19950
19951        public final boolean isInitialized() {
19952          return true;
19953        }
19954
19955        public Builder mergeFrom(
19956            com.google.protobuf.CodedInputStream input,
19957            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
19958            throws java.io.IOException {
19959          org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.DelegationKey parsedMessage = null;
19960          try {
19961            parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
19962          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
19963            parsedMessage = (org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.DelegationKey) e.getUnfinishedMessage();
19964            throw e;
19965          } finally {
19966            if (parsedMessage != null) {
19967              mergeFrom(parsedMessage);
19968            }
19969          }
19970          return this;
19971        }
19972        private int bitField0_;
19973
19974        // optional uint32 id = 1;
19975        private int id_ ;
19976        /**
19977         * <code>optional uint32 id = 1;</code>
19978         */
19979        public boolean hasId() {
19980          return ((bitField0_ & 0x00000001) == 0x00000001);
19981        }
19982        /**
19983         * <code>optional uint32 id = 1;</code>
19984         */
19985        public int getId() {
19986          return id_;
19987        }
19988        /**
19989         * <code>optional uint32 id = 1;</code>
19990         */
19991        public Builder setId(int value) {
19992          bitField0_ |= 0x00000001;
19993          id_ = value;
19994          onChanged();
19995          return this;
19996        }
19997        /**
19998         * <code>optional uint32 id = 1;</code>
19999         */
20000        public Builder clearId() {
20001          bitField0_ = (bitField0_ & ~0x00000001);
20002          id_ = 0;
20003          onChanged();
20004          return this;
20005        }
20006
20007        // optional uint64 expiryDate = 2;
20008        private long expiryDate_ ;
20009        /**
20010         * <code>optional uint64 expiryDate = 2;</code>
20011         */
20012        public boolean hasExpiryDate() {
20013          return ((bitField0_ & 0x00000002) == 0x00000002);
20014        }
20015        /**
20016         * <code>optional uint64 expiryDate = 2;</code>
20017         */
20018        public long getExpiryDate() {
20019          return expiryDate_;
20020        }
20021        /**
20022         * <code>optional uint64 expiryDate = 2;</code>
20023         */
20024        public Builder setExpiryDate(long value) {
20025          bitField0_ |= 0x00000002;
20026          expiryDate_ = value;
20027          onChanged();
20028          return this;
20029        }
20030        /**
20031         * <code>optional uint64 expiryDate = 2;</code>
20032         */
20033        public Builder clearExpiryDate() {
20034          bitField0_ = (bitField0_ & ~0x00000002);
20035          expiryDate_ = 0L;
20036          onChanged();
20037          return this;
20038        }
20039
20040        // optional bytes key = 3;
20041        private com.google.protobuf.ByteString key_ = com.google.protobuf.ByteString.EMPTY;
20042        /**
20043         * <code>optional bytes key = 3;</code>
20044         */
20045        public boolean hasKey() {
20046          return ((bitField0_ & 0x00000004) == 0x00000004);
20047        }
20048        /**
20049         * <code>optional bytes key = 3;</code>
20050         */
20051        public com.google.protobuf.ByteString getKey() {
20052          return key_;
20053        }
20054        /**
20055         * <code>optional bytes key = 3;</code>
20056         */
20057        public Builder setKey(com.google.protobuf.ByteString value) {
          if (value == null) {
            throw new NullPointerException();
          }
          bitField0_ |= 0x00000004;
20062          key_ = value;
20063          onChanged();
20064          return this;
20065        }
20066        /**
20067         * <code>optional bytes key = 3;</code>
20068         */
20069        public Builder clearKey() {
20070          bitField0_ = (bitField0_ & ~0x00000004);
20071          key_ = getDefaultInstance().getKey();
20072          onChanged();
20073          return this;
20074        }
20075
20076        // @@protoc_insertion_point(builder_scope:hadoop.hdfs.fsimage.SecretManagerSection.DelegationKey)
20077      }
20078
20079      static {
20080        defaultInstance = new DelegationKey(true);
20081        defaultInstance.initFields();
20082      }
20083
20084      // @@protoc_insertion_point(class_scope:hadoop.hdfs.fsimage.SecretManagerSection.DelegationKey)
20085    }
20086
20087    public interface PersistTokenOrBuilder
20088        extends com.google.protobuf.MessageOrBuilder {
20089
20090      // optional uint32 version = 1;
20091      /**
20092       * <code>optional uint32 version = 1;</code>
20093       */
20094      boolean hasVersion();
20095      /**
20096       * <code>optional uint32 version = 1;</code>
20097       */
20098      int getVersion();
20099
20100      // optional string owner = 2;
20101      /**
20102       * <code>optional string owner = 2;</code>
20103       */
20104      boolean hasOwner();
20105      /**
20106       * <code>optional string owner = 2;</code>
20107       */
20108      java.lang.String getOwner();
20109      /**
20110       * <code>optional string owner = 2;</code>
20111       */
20112      com.google.protobuf.ByteString
20113          getOwnerBytes();
20114
20115      // optional string renewer = 3;
20116      /**
20117       * <code>optional string renewer = 3;</code>
20118       */
20119      boolean hasRenewer();
20120      /**
20121       * <code>optional string renewer = 3;</code>
20122       */
20123      java.lang.String getRenewer();
20124      /**
20125       * <code>optional string renewer = 3;</code>
20126       */
20127      com.google.protobuf.ByteString
20128          getRenewerBytes();
20129
20130      // optional string realUser = 4;
20131      /**
20132       * <code>optional string realUser = 4;</code>
20133       */
20134      boolean hasRealUser();
20135      /**
20136       * <code>optional string realUser = 4;</code>
20137       */
20138      java.lang.String getRealUser();
20139      /**
20140       * <code>optional string realUser = 4;</code>
20141       */
20142      com.google.protobuf.ByteString
20143          getRealUserBytes();
20144
20145      // optional uint64 issueDate = 5;
20146      /**
20147       * <code>optional uint64 issueDate = 5;</code>
20148       */
20149      boolean hasIssueDate();
20150      /**
20151       * <code>optional uint64 issueDate = 5;</code>
20152       */
20153      long getIssueDate();
20154
20155      // optional uint64 maxDate = 6;
20156      /**
20157       * <code>optional uint64 maxDate = 6;</code>
20158       */
20159      boolean hasMaxDate();
20160      /**
20161       * <code>optional uint64 maxDate = 6;</code>
20162       */
20163      long getMaxDate();
20164
20165      // optional uint32 sequenceNumber = 7;
20166      /**
20167       * <code>optional uint32 sequenceNumber = 7;</code>
20168       */
20169      boolean hasSequenceNumber();
20170      /**
20171       * <code>optional uint32 sequenceNumber = 7;</code>
20172       */
20173      int getSequenceNumber();
20174
20175      // optional uint32 masterKeyId = 8;
20176      /**
20177       * <code>optional uint32 masterKeyId = 8;</code>
20178       */
20179      boolean hasMasterKeyId();
20180      /**
20181       * <code>optional uint32 masterKeyId = 8;</code>
20182       */
20183      int getMasterKeyId();
20184
20185      // optional uint64 expiryDate = 9;
20186      /**
20187       * <code>optional uint64 expiryDate = 9;</code>
20188       */
20189      boolean hasExpiryDate();
20190      /**
20191       * <code>optional uint64 expiryDate = 9;</code>
20192       */
20193      long getExpiryDate();
20194    }
20195    /**
20196     * Protobuf type {@code hadoop.hdfs.fsimage.SecretManagerSection.PersistToken}
20197     */
20198    public static final class PersistToken extends
20199        com.google.protobuf.GeneratedMessage
20200        implements PersistTokenOrBuilder {
20201      // Use PersistToken.newBuilder() to construct.
20202      private PersistToken(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
20203        super(builder);
20204        this.unknownFields = builder.getUnknownFields();
20205      }
20206      private PersistToken(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
20207
20208      private static final PersistToken defaultInstance;
20209      public static PersistToken getDefaultInstance() {
20210        return defaultInstance;
20211      }
20212
20213      public PersistToken getDefaultInstanceForType() {
20214        return defaultInstance;
20215      }
20216
20217      private final com.google.protobuf.UnknownFieldSet unknownFields;
20218      @java.lang.Override
20219      public final com.google.protobuf.UnknownFieldSet
20220          getUnknownFields() {
20221        return this.unknownFields;
20222      }
20223      private PersistToken(
20224          com.google.protobuf.CodedInputStream input,
20225          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
20226          throws com.google.protobuf.InvalidProtocolBufferException {
20227        initFields();
20228        int mutable_bitField0_ = 0;
20229        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
20230            com.google.protobuf.UnknownFieldSet.newBuilder();
20231        try {
20232          boolean done = false;
20233          while (!done) {
20234            int tag = input.readTag();
20235            switch (tag) {
20236              case 0:
20237                done = true;
20238                break;
20239              default: {
20240                if (!parseUnknownField(input, unknownFields,
20241                                       extensionRegistry, tag)) {
20242                  done = true;
20243                }
20244                break;
20245              }
20246              case 8: {
20247                bitField0_ |= 0x00000001;
20248                version_ = input.readUInt32();
20249                break;
20250              }
20251              case 18: {
20252                bitField0_ |= 0x00000002;
20253                owner_ = input.readBytes();
20254                break;
20255              }
20256              case 26: {
20257                bitField0_ |= 0x00000004;
20258                renewer_ = input.readBytes();
20259                break;
20260              }
20261              case 34: {
20262                bitField0_ |= 0x00000008;
20263                realUser_ = input.readBytes();
20264                break;
20265              }
20266              case 40: {
20267                bitField0_ |= 0x00000010;
20268                issueDate_ = input.readUInt64();
20269                break;
20270              }
20271              case 48: {
20272                bitField0_ |= 0x00000020;
20273                maxDate_ = input.readUInt64();
20274                break;
20275              }
20276              case 56: {
20277                bitField0_ |= 0x00000040;
20278                sequenceNumber_ = input.readUInt32();
20279                break;
20280              }
20281              case 64: {
20282                bitField0_ |= 0x00000080;
20283                masterKeyId_ = input.readUInt32();
20284                break;
20285              }
20286              case 72: {
20287                bitField0_ |= 0x00000100;
20288                expiryDate_ = input.readUInt64();
20289                break;
20290              }
20291            }
20292          }
20293        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
20294          throw e.setUnfinishedMessage(this);
20295        } catch (java.io.IOException e) {
20296          throw new com.google.protobuf.InvalidProtocolBufferException(
20297              e.getMessage()).setUnfinishedMessage(this);
20298        } finally {
20299          this.unknownFields = unknownFields.build();
20300          makeExtensionsImmutable();
20301        }
20302      }
20303      public static final com.google.protobuf.Descriptors.Descriptor
20304          getDescriptor() {
20305        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SecretManagerSection_PersistToken_descriptor;
20306      }
20307
20308      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
20309          internalGetFieldAccessorTable() {
20310        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SecretManagerSection_PersistToken_fieldAccessorTable
20311            .ensureFieldAccessorsInitialized(
20312                org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.PersistToken.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.PersistToken.Builder.class);
20313      }
20314
20315      public static com.google.protobuf.Parser<PersistToken> PARSER =
20316          new com.google.protobuf.AbstractParser<PersistToken>() {
20317        public PersistToken parsePartialFrom(
20318            com.google.protobuf.CodedInputStream input,
20319            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
20320            throws com.google.protobuf.InvalidProtocolBufferException {
20321          return new PersistToken(input, extensionRegistry);
20322        }
20323      };
20324
20325      @java.lang.Override
20326      public com.google.protobuf.Parser<PersistToken> getParserForType() {
20327        return PARSER;
20328      }
20329
20330      private int bitField0_;
20331      // optional uint32 version = 1;
20332      public static final int VERSION_FIELD_NUMBER = 1;
20333      private int version_;
20334      /**
20335       * <code>optional uint32 version = 1;</code>
20336       */
20337      public boolean hasVersion() {
20338        return ((bitField0_ & 0x00000001) == 0x00000001);
20339      }
20340      /**
20341       * <code>optional uint32 version = 1;</code>
20342       */
20343      public int getVersion() {
20344        return version_;
20345      }
20346
20347      // optional string owner = 2;
20348      public static final int OWNER_FIELD_NUMBER = 2;
20349      private java.lang.Object owner_;
20350      /**
20351       * <code>optional string owner = 2;</code>
20352       */
20353      public boolean hasOwner() {
20354        return ((bitField0_ & 0x00000002) == 0x00000002);
20355      }
20356      /**
20357       * <code>optional string owner = 2;</code>
20358       */
20359      public java.lang.String getOwner() {
20360        java.lang.Object ref = owner_;
20361        if (ref instanceof java.lang.String) {
20362          return (java.lang.String) ref;
20363        } else {
20364          com.google.protobuf.ByteString bs = 
20365              (com.google.protobuf.ByteString) ref;
20366          java.lang.String s = bs.toStringUtf8();
20367          if (bs.isValidUtf8()) {
20368            owner_ = s;
20369          }
20370          return s;
20371        }
20372      }
20373      /**
20374       * <code>optional string owner = 2;</code>
20375       */
20376      public com.google.protobuf.ByteString
20377          getOwnerBytes() {
20378        java.lang.Object ref = owner_;
20379        if (ref instanceof java.lang.String) {
20380          com.google.protobuf.ByteString b = 
20381              com.google.protobuf.ByteString.copyFromUtf8(
20382                  (java.lang.String) ref);
20383          owner_ = b;
20384          return b;
20385        } else {
20386          return (com.google.protobuf.ByteString) ref;
20387        }
20388      }
20389
20390      // optional string renewer = 3;
20391      public static final int RENEWER_FIELD_NUMBER = 3;
20392      private java.lang.Object renewer_;
20393      /**
20394       * <code>optional string renewer = 3;</code>
20395       */
20396      public boolean hasRenewer() {
20397        return ((bitField0_ & 0x00000004) == 0x00000004);
20398      }
20399      /**
20400       * <code>optional string renewer = 3;</code>
20401       */
20402      public java.lang.String getRenewer() {
20403        java.lang.Object ref = renewer_;
20404        if (ref instanceof java.lang.String) {
20405          return (java.lang.String) ref;
20406        } else {
20407          com.google.protobuf.ByteString bs = 
20408              (com.google.protobuf.ByteString) ref;
20409          java.lang.String s = bs.toStringUtf8();
20410          if (bs.isValidUtf8()) {
20411            renewer_ = s;
20412          }
20413          return s;
20414        }
20415      }
20416      /**
20417       * <code>optional string renewer = 3;</code>
20418       */
20419      public com.google.protobuf.ByteString
20420          getRenewerBytes() {
20421        java.lang.Object ref = renewer_;
20422        if (ref instanceof java.lang.String) {
20423          com.google.protobuf.ByteString b = 
20424              com.google.protobuf.ByteString.copyFromUtf8(
20425                  (java.lang.String) ref);
20426          renewer_ = b;
20427          return b;
20428        } else {
20429          return (com.google.protobuf.ByteString) ref;
20430        }
20431      }
20432
20433      // optional string realUser = 4;
20434      public static final int REALUSER_FIELD_NUMBER = 4;
20435      private java.lang.Object realUser_;
20436      /**
20437       * <code>optional string realUser = 4;</code>
20438       */
20439      public boolean hasRealUser() {
20440        return ((bitField0_ & 0x00000008) == 0x00000008);
20441      }
20442      /**
20443       * <code>optional string realUser = 4;</code>
20444       */
20445      public java.lang.String getRealUser() {
20446        java.lang.Object ref = realUser_;
20447        if (ref instanceof java.lang.String) {
20448          return (java.lang.String) ref;
20449        } else {
20450          com.google.protobuf.ByteString bs = 
20451              (com.google.protobuf.ByteString) ref;
20452          java.lang.String s = bs.toStringUtf8();
20453          if (bs.isValidUtf8()) {
20454            realUser_ = s;
20455          }
20456          return s;
20457        }
20458      }
20459      /**
20460       * <code>optional string realUser = 4;</code>
20461       */
20462      public com.google.protobuf.ByteString
20463          getRealUserBytes() {
20464        java.lang.Object ref = realUser_;
20465        if (ref instanceof java.lang.String) {
20466          com.google.protobuf.ByteString b = 
20467              com.google.protobuf.ByteString.copyFromUtf8(
20468                  (java.lang.String) ref);
20469          realUser_ = b;
20470          return b;
20471        } else {
20472          return (com.google.protobuf.ByteString) ref;
20473        }
20474      }
20475
20476      // optional uint64 issueDate = 5;
20477      public static final int ISSUEDATE_FIELD_NUMBER = 5;
20478      private long issueDate_;
20479      /**
20480       * <code>optional uint64 issueDate = 5;</code>
20481       */
20482      public boolean hasIssueDate() {
20483        return ((bitField0_ & 0x00000010) == 0x00000010);
20484      }
20485      /**
20486       * <code>optional uint64 issueDate = 5;</code>
20487       */
20488      public long getIssueDate() {
20489        return issueDate_;
20490      }
20491
20492      // optional uint64 maxDate = 6;
20493      public static final int MAXDATE_FIELD_NUMBER = 6;
20494      private long maxDate_;
20495      /**
20496       * <code>optional uint64 maxDate = 6;</code>
20497       */
20498      public boolean hasMaxDate() {
20499        return ((bitField0_ & 0x00000020) == 0x00000020);
20500      }
20501      /**
20502       * <code>optional uint64 maxDate = 6;</code>
20503       */
20504      public long getMaxDate() {
20505        return maxDate_;
20506      }
20507
20508      // optional uint32 sequenceNumber = 7;
20509      public static final int SEQUENCENUMBER_FIELD_NUMBER = 7;
20510      private int sequenceNumber_;
20511      /**
20512       * <code>optional uint32 sequenceNumber = 7;</code>
20513       */
20514      public boolean hasSequenceNumber() {
20515        return ((bitField0_ & 0x00000040) == 0x00000040);
20516      }
20517      /**
20518       * <code>optional uint32 sequenceNumber = 7;</code>
20519       */
20520      public int getSequenceNumber() {
20521        return sequenceNumber_;
20522      }
20523
20524      // optional uint32 masterKeyId = 8;
20525      public static final int MASTERKEYID_FIELD_NUMBER = 8;
20526      private int masterKeyId_;
20527      /**
20528       * <code>optional uint32 masterKeyId = 8;</code>
20529       */
20530      public boolean hasMasterKeyId() {
20531        return ((bitField0_ & 0x00000080) == 0x00000080);
20532      }
20533      /**
20534       * <code>optional uint32 masterKeyId = 8;</code>
20535       */
20536      public int getMasterKeyId() {
20537        return masterKeyId_;
20538      }
20539
20540      // optional uint64 expiryDate = 9;
20541      public static final int EXPIRYDATE_FIELD_NUMBER = 9;
20542      private long expiryDate_;
20543      /**
20544       * <code>optional uint64 expiryDate = 9;</code>
20545       */
20546      public boolean hasExpiryDate() {
20547        return ((bitField0_ & 0x00000100) == 0x00000100);
20548      }
20549      /**
20550       * <code>optional uint64 expiryDate = 9;</code>
20551       */
20552      public long getExpiryDate() {
20553        return expiryDate_;
20554      }
20555
20556      private void initFields() {
20557        version_ = 0;
20558        owner_ = "";
20559        renewer_ = "";
20560        realUser_ = "";
20561        issueDate_ = 0L;
20562        maxDate_ = 0L;
20563        sequenceNumber_ = 0;
20564        masterKeyId_ = 0;
20565        expiryDate_ = 0L;
20566      }
20567      private byte memoizedIsInitialized = -1;
20568      public final boolean isInitialized() {
20569        byte isInitialized = memoizedIsInitialized;
20570        if (isInitialized != -1) return isInitialized == 1;
20571
20572        memoizedIsInitialized = 1;
20573        return true;
20574      }
20575
20576      public void writeTo(com.google.protobuf.CodedOutputStream output)
20577                          throws java.io.IOException {
20578        getSerializedSize();
20579        if (((bitField0_ & 0x00000001) == 0x00000001)) {
20580          output.writeUInt32(1, version_);
20581        }
20582        if (((bitField0_ & 0x00000002) == 0x00000002)) {
20583          output.writeBytes(2, getOwnerBytes());
20584        }
20585        if (((bitField0_ & 0x00000004) == 0x00000004)) {
20586          output.writeBytes(3, getRenewerBytes());
20587        }
20588        if (((bitField0_ & 0x00000008) == 0x00000008)) {
20589          output.writeBytes(4, getRealUserBytes());
20590        }
20591        if (((bitField0_ & 0x00000010) == 0x00000010)) {
20592          output.writeUInt64(5, issueDate_);
20593        }
20594        if (((bitField0_ & 0x00000020) == 0x00000020)) {
20595          output.writeUInt64(6, maxDate_);
20596        }
20597        if (((bitField0_ & 0x00000040) == 0x00000040)) {
20598          output.writeUInt32(7, sequenceNumber_);
20599        }
20600        if (((bitField0_ & 0x00000080) == 0x00000080)) {
20601          output.writeUInt32(8, masterKeyId_);
20602        }
20603        if (((bitField0_ & 0x00000100) == 0x00000100)) {
20604          output.writeUInt64(9, expiryDate_);
20605        }
20606        getUnknownFields().writeTo(output);
20607      }
20608
20609      private int memoizedSerializedSize = -1;
20610      public int getSerializedSize() {
20611        int size = memoizedSerializedSize;
20612        if (size != -1) return size;
20613
20614        size = 0;
20615        if (((bitField0_ & 0x00000001) == 0x00000001)) {
20616          size += com.google.protobuf.CodedOutputStream
20617            .computeUInt32Size(1, version_);
20618        }
20619        if (((bitField0_ & 0x00000002) == 0x00000002)) {
20620          size += com.google.protobuf.CodedOutputStream
20621            .computeBytesSize(2, getOwnerBytes());
20622        }
20623        if (((bitField0_ & 0x00000004) == 0x00000004)) {
20624          size += com.google.protobuf.CodedOutputStream
20625            .computeBytesSize(3, getRenewerBytes());
20626        }
20627        if (((bitField0_ & 0x00000008) == 0x00000008)) {
20628          size += com.google.protobuf.CodedOutputStream
20629            .computeBytesSize(4, getRealUserBytes());
20630        }
20631        if (((bitField0_ & 0x00000010) == 0x00000010)) {
20632          size += com.google.protobuf.CodedOutputStream
20633            .computeUInt64Size(5, issueDate_);
20634        }
20635        if (((bitField0_ & 0x00000020) == 0x00000020)) {
20636          size += com.google.protobuf.CodedOutputStream
20637            .computeUInt64Size(6, maxDate_);
20638        }
20639        if (((bitField0_ & 0x00000040) == 0x00000040)) {
20640          size += com.google.protobuf.CodedOutputStream
20641            .computeUInt32Size(7, sequenceNumber_);
20642        }
20643        if (((bitField0_ & 0x00000080) == 0x00000080)) {
20644          size += com.google.protobuf.CodedOutputStream
20645            .computeUInt32Size(8, masterKeyId_);
20646        }
20647        if (((bitField0_ & 0x00000100) == 0x00000100)) {
20648          size += com.google.protobuf.CodedOutputStream
20649            .computeUInt64Size(9, expiryDate_);
20650        }
20651        size += getUnknownFields().getSerializedSize();
20652        memoizedSerializedSize = size;
20653        return size;
20654      }
20655
20656      private static final long serialVersionUID = 0L;
20657      @java.lang.Override
20658      protected java.lang.Object writeReplace()
20659          throws java.io.ObjectStreamException {
20660        return super.writeReplace();
20661      }
20662
20663      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.PersistToken parseFrom(
20664          com.google.protobuf.ByteString data)
20665          throws com.google.protobuf.InvalidProtocolBufferException {
20666        return PARSER.parseFrom(data);
20667      }
20668      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.PersistToken parseFrom(
20669          com.google.protobuf.ByteString data,
20670          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
20671          throws com.google.protobuf.InvalidProtocolBufferException {
20672        return PARSER.parseFrom(data, extensionRegistry);
20673      }
20674      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.PersistToken parseFrom(byte[] data)
20675          throws com.google.protobuf.InvalidProtocolBufferException {
20676        return PARSER.parseFrom(data);
20677      }
20678      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.PersistToken parseFrom(
20679          byte[] data,
20680          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
20681          throws com.google.protobuf.InvalidProtocolBufferException {
20682        return PARSER.parseFrom(data, extensionRegistry);
20683      }
20684      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.PersistToken parseFrom(java.io.InputStream input)
20685          throws java.io.IOException {
20686        return PARSER.parseFrom(input);
20687      }
20688      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.PersistToken parseFrom(
20689          java.io.InputStream input,
20690          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
20691          throws java.io.IOException {
20692        return PARSER.parseFrom(input, extensionRegistry);
20693      }
20694      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.PersistToken parseDelimitedFrom(java.io.InputStream input)
20695          throws java.io.IOException {
20696        return PARSER.parseDelimitedFrom(input);
20697      }
20698      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.PersistToken parseDelimitedFrom(
20699          java.io.InputStream input,
20700          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
20701          throws java.io.IOException {
20702        return PARSER.parseDelimitedFrom(input, extensionRegistry);
20703      }
20704      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.PersistToken parseFrom(
20705          com.google.protobuf.CodedInputStream input)
20706          throws java.io.IOException {
20707        return PARSER.parseFrom(input);
20708      }
20709      public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.PersistToken parseFrom(
20710          com.google.protobuf.CodedInputStream input,
20711          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
20712          throws java.io.IOException {
20713        return PARSER.parseFrom(input, extensionRegistry);
20714      }
20715
20716      public static Builder newBuilder() { return Builder.create(); }
20717      public Builder newBuilderForType() { return newBuilder(); }
20718      public static Builder newBuilder(org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.PersistToken prototype) {
20719        return newBuilder().mergeFrom(prototype);
20720      }
20721      public Builder toBuilder() { return newBuilder(this); }
20722
20723      @java.lang.Override
20724      protected Builder newBuilderForType(
20725          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
20726        Builder builder = new Builder(parent);
20727        return builder;
20728      }
20729      /**
20730       * Protobuf type {@code hadoop.hdfs.fsimage.SecretManagerSection.PersistToken}
20731       */
20732      public static final class Builder extends
20733          com.google.protobuf.GeneratedMessage.Builder<Builder>
20734         implements org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.PersistTokenOrBuilder {
20735        public static final com.google.protobuf.Descriptors.Descriptor
20736            getDescriptor() {
20737          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SecretManagerSection_PersistToken_descriptor;
20738        }
20739
20740        protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
20741            internalGetFieldAccessorTable() {
20742          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SecretManagerSection_PersistToken_fieldAccessorTable
20743              .ensureFieldAccessorsInitialized(
20744                  org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.PersistToken.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.PersistToken.Builder.class);
20745        }
20746
20747        // Construct using org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.PersistToken.newBuilder()
20748        private Builder() {
20749          maybeForceBuilderInitialization();
20750        }
20751
20752        private Builder(
20753            com.google.protobuf.GeneratedMessage.BuilderParent parent) {
20754          super(parent);
20755          maybeForceBuilderInitialization();
20756        }
20757        private void maybeForceBuilderInitialization() {
20758          if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
20759          }
20760        }
20761        private static Builder create() {
20762          return new Builder();
20763        }
20764
20765        public Builder clear() {
20766          super.clear();
20767          version_ = 0;
20768          bitField0_ = (bitField0_ & ~0x00000001);
20769          owner_ = "";
20770          bitField0_ = (bitField0_ & ~0x00000002);
20771          renewer_ = "";
20772          bitField0_ = (bitField0_ & ~0x00000004);
20773          realUser_ = "";
20774          bitField0_ = (bitField0_ & ~0x00000008);
20775          issueDate_ = 0L;
20776          bitField0_ = (bitField0_ & ~0x00000010);
20777          maxDate_ = 0L;
20778          bitField0_ = (bitField0_ & ~0x00000020);
20779          sequenceNumber_ = 0;
20780          bitField0_ = (bitField0_ & ~0x00000040);
20781          masterKeyId_ = 0;
20782          bitField0_ = (bitField0_ & ~0x00000080);
20783          expiryDate_ = 0L;
20784          bitField0_ = (bitField0_ & ~0x00000100);
20785          return this;
20786        }
20787
20788        public Builder clone() {
20789          return create().mergeFrom(buildPartial());
20790        }
20791
20792        public com.google.protobuf.Descriptors.Descriptor
20793            getDescriptorForType() {
20794          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SecretManagerSection_PersistToken_descriptor;
20795        }
20796
20797        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.PersistToken getDefaultInstanceForType() {
20798          return org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.PersistToken.getDefaultInstance();
20799        }
20800
20801        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.PersistToken build() {
20802          org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.PersistToken result = buildPartial();
20803          if (!result.isInitialized()) {
20804            throw newUninitializedMessageException(result);
20805          }
20806          return result;
20807        }
20808
20809        public org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.PersistToken buildPartial() {
20810          org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.PersistToken result = new org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.PersistToken(this);
20811          int from_bitField0_ = bitField0_;
20812          int to_bitField0_ = 0;
20813          if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
20814            to_bitField0_ |= 0x00000001;
20815          }
20816          result.version_ = version_;
20817          if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
20818            to_bitField0_ |= 0x00000002;
20819          }
20820          result.owner_ = owner_;
20821          if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
20822            to_bitField0_ |= 0x00000004;
20823          }
20824          result.renewer_ = renewer_;
20825          if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
20826            to_bitField0_ |= 0x00000008;
20827          }
20828          result.realUser_ = realUser_;
20829          if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
20830            to_bitField0_ |= 0x00000010;
20831          }
20832          result.issueDate_ = issueDate_;
20833          if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
20834            to_bitField0_ |= 0x00000020;
20835          }
20836          result.maxDate_ = maxDate_;
20837          if (((from_bitField0_ & 0x00000040) == 0x00000040)) {
20838            to_bitField0_ |= 0x00000040;
20839          }
20840          result.sequenceNumber_ = sequenceNumber_;
20841          if (((from_bitField0_ & 0x00000080) == 0x00000080)) {
20842            to_bitField0_ |= 0x00000080;
20843          }
20844          result.masterKeyId_ = masterKeyId_;
20845          if (((from_bitField0_ & 0x00000100) == 0x00000100)) {
20846            to_bitField0_ |= 0x00000100;
20847          }
20848          result.expiryDate_ = expiryDate_;
20849          result.bitField0_ = to_bitField0_;
20850          onBuilt();
20851          return result;
20852        }
20853
20854        public Builder mergeFrom(com.google.protobuf.Message other) {
20855          if (other instanceof org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.PersistToken) {
20856            return mergeFrom((org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.PersistToken)other);
20857          } else {
20858            super.mergeFrom(other);
20859            return this;
20860          }
20861        }
20862
20863        public Builder mergeFrom(org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.PersistToken other) {
20864          if (other == org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.PersistToken.getDefaultInstance()) return this;
20865          if (other.hasVersion()) {
20866            setVersion(other.getVersion());
20867          }
20868          if (other.hasOwner()) {
20869            bitField0_ |= 0x00000002;
20870            owner_ = other.owner_;
20871            onChanged();
20872          }
20873          if (other.hasRenewer()) {
20874            bitField0_ |= 0x00000004;
20875            renewer_ = other.renewer_;
20876            onChanged();
20877          }
20878          if (other.hasRealUser()) {
20879            bitField0_ |= 0x00000008;
20880            realUser_ = other.realUser_;
20881            onChanged();
20882          }
20883          if (other.hasIssueDate()) {
20884            setIssueDate(other.getIssueDate());
20885          }
20886          if (other.hasMaxDate()) {
20887            setMaxDate(other.getMaxDate());
20888          }
20889          if (other.hasSequenceNumber()) {
20890            setSequenceNumber(other.getSequenceNumber());
20891          }
20892          if (other.hasMasterKeyId()) {
20893            setMasterKeyId(other.getMasterKeyId());
20894          }
20895          if (other.hasExpiryDate()) {
20896            setExpiryDate(other.getExpiryDate());
20897          }
20898          this.mergeUnknownFields(other.getUnknownFields());
20899          return this;
20900        }
20901
20902        public final boolean isInitialized() {
20903          return true;
20904        }
20905
20906        public Builder mergeFrom(
20907            com.google.protobuf.CodedInputStream input,
20908            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
20909            throws java.io.IOException {
20910          org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.PersistToken parsedMessage = null;
20911          try {
20912            parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
20913          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
20914            parsedMessage = (org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.PersistToken) e.getUnfinishedMessage();
20915            throw e;
20916          } finally {
20917            if (parsedMessage != null) {
20918              mergeFrom(parsedMessage);
20919            }
20920          }
20921          return this;
20922        }
20923        private int bitField0_;
20924
20925        // optional uint32 version = 1;
20926        private int version_ ;
20927        /**
20928         * <code>optional uint32 version = 1;</code>
20929         */
20930        public boolean hasVersion() {
20931          return ((bitField0_ & 0x00000001) == 0x00000001);
20932        }
20933        /**
20934         * <code>optional uint32 version = 1;</code>
20935         */
20936        public int getVersion() {
20937          return version_;
20938        }
20939        /**
20940         * <code>optional uint32 version = 1;</code>
20941         */
20942        public Builder setVersion(int value) {
20943          bitField0_ |= 0x00000001;
20944          version_ = value;
20945          onChanged();
20946          return this;
20947        }
20948        /**
20949         * <code>optional uint32 version = 1;</code>
20950         */
20951        public Builder clearVersion() {
20952          bitField0_ = (bitField0_ & ~0x00000001);
20953          version_ = 0;
20954          onChanged();
20955          return this;
20956        }
20957
20958        // optional string owner = 2;
20959        private java.lang.Object owner_ = "";
20960        /**
20961         * <code>optional string owner = 2;</code>
20962         */
20963        public boolean hasOwner() {
20964          return ((bitField0_ & 0x00000002) == 0x00000002);
20965        }
20966        /**
20967         * <code>optional string owner = 2;</code>
20968         */
20969        public java.lang.String getOwner() {
20970          java.lang.Object ref = owner_;
20971          if (!(ref instanceof java.lang.String)) {
20972            java.lang.String s = ((com.google.protobuf.ByteString) ref)
20973                .toStringUtf8();
20974            owner_ = s;
20975            return s;
20976          } else {
20977            return (java.lang.String) ref;
20978          }
20979        }
20980        /**
20981         * <code>optional string owner = 2;</code>
20982         */
20983        public com.google.protobuf.ByteString
20984            getOwnerBytes() {
20985          java.lang.Object ref = owner_;
20986          if (ref instanceof String) {
20987            com.google.protobuf.ByteString b = 
20988                com.google.protobuf.ByteString.copyFromUtf8(
20989                    (java.lang.String) ref);
20990            owner_ = b;
20991            return b;
20992          } else {
20993            return (com.google.protobuf.ByteString) ref;
20994          }
20995        }
20996        /**
20997         * <code>optional string owner = 2;</code>
20998         */
20999        public Builder setOwner(
21000            java.lang.String value) {
          if (value == null) {
            throw new NullPointerException();
          }
          bitField0_ |= 0x00000002;
21005          owner_ = value;
21006          onChanged();
21007          return this;
21008        }
21009        /**
21010         * <code>optional string owner = 2;</code>
21011         */
21012        public Builder clearOwner() {
21013          bitField0_ = (bitField0_ & ~0x00000002);
21014          owner_ = getDefaultInstance().getOwner();
21015          onChanged();
21016          return this;
21017        }
21018        /**
21019         * <code>optional string owner = 2;</code>
21020         */
21021        public Builder setOwnerBytes(
21022            com.google.protobuf.ByteString value) {
          if (value == null) {
            throw new NullPointerException();
          }
          bitField0_ |= 0x00000002;
21027          owner_ = value;
21028          onChanged();
21029          return this;
21030        }
21031
21032        // optional string renewer = 3;
21033        private java.lang.Object renewer_ = "";
21034        /**
21035         * <code>optional string renewer = 3;</code>
21036         */
21037        public boolean hasRenewer() {
21038          return ((bitField0_ & 0x00000004) == 0x00000004);
21039        }
21040        /**
21041         * <code>optional string renewer = 3;</code>
21042         */
21043        public java.lang.String getRenewer() {
21044          java.lang.Object ref = renewer_;
21045          if (!(ref instanceof java.lang.String)) {
21046            java.lang.String s = ((com.google.protobuf.ByteString) ref)
21047                .toStringUtf8();
21048            renewer_ = s;
21049            return s;
21050          } else {
21051            return (java.lang.String) ref;
21052          }
21053        }
21054        /**
21055         * <code>optional string renewer = 3;</code>
21056         */
21057        public com.google.protobuf.ByteString
21058            getRenewerBytes() {
21059          java.lang.Object ref = renewer_;
21060          if (ref instanceof String) {
21061            com.google.protobuf.ByteString b = 
21062                com.google.protobuf.ByteString.copyFromUtf8(
21063                    (java.lang.String) ref);
21064            renewer_ = b;
21065            return b;
21066          } else {
21067            return (com.google.protobuf.ByteString) ref;
21068          }
21069        }
21070        /**
21071         * <code>optional string renewer = 3;</code>
21072         */
21073        public Builder setRenewer(
21074            java.lang.String value) {
          if (value == null) {
            throw new NullPointerException();
          }
          bitField0_ |= 0x00000004;
21079          renewer_ = value;
21080          onChanged();
21081          return this;
21082        }
21083        /**
21084         * <code>optional string renewer = 3;</code>
21085         */
21086        public Builder clearRenewer() {
21087          bitField0_ = (bitField0_ & ~0x00000004);
21088          renewer_ = getDefaultInstance().getRenewer();
21089          onChanged();
21090          return this;
21091        }
21092        /**
21093         * <code>optional string renewer = 3;</code>
21094         */
21095        public Builder setRenewerBytes(
21096            com.google.protobuf.ByteString value) {
          if (value == null) {
            throw new NullPointerException();
          }
          bitField0_ |= 0x00000004;
21101          renewer_ = value;
21102          onChanged();
21103          return this;
21104        }
21105
21106        // optional string realUser = 4;
21107        private java.lang.Object realUser_ = "";
21108        /**
21109         * <code>optional string realUser = 4;</code>
21110         */
21111        public boolean hasRealUser() {
21112          return ((bitField0_ & 0x00000008) == 0x00000008);
21113        }
21114        /**
21115         * <code>optional string realUser = 4;</code>
21116         */
21117        public java.lang.String getRealUser() {
21118          java.lang.Object ref = realUser_;
21119          if (!(ref instanceof java.lang.String)) {
21120            java.lang.String s = ((com.google.protobuf.ByteString) ref)
21121                .toStringUtf8();
21122            realUser_ = s;
21123            return s;
21124          } else {
21125            return (java.lang.String) ref;
21126          }
21127        }
21128        /**
21129         * <code>optional string realUser = 4;</code>
21130         */
21131        public com.google.protobuf.ByteString
21132            getRealUserBytes() {
21133          java.lang.Object ref = realUser_;
21134          if (ref instanceof String) {
21135            com.google.protobuf.ByteString b = 
21136                com.google.protobuf.ByteString.copyFromUtf8(
21137                    (java.lang.String) ref);
21138            realUser_ = b;
21139            return b;
21140          } else {
21141            return (com.google.protobuf.ByteString) ref;
21142          }
21143        }
21144        /**
21145         * <code>optional string realUser = 4;</code>
21146         */
21147        public Builder setRealUser(
21148            java.lang.String value) {
          if (value == null) {
            throw new NullPointerException();
          }
          bitField0_ |= 0x00000008;
21153          realUser_ = value;
21154          onChanged();
21155          return this;
21156        }
21157        /**
21158         * <code>optional string realUser = 4;</code>
21159         */
21160        public Builder clearRealUser() {
21161          bitField0_ = (bitField0_ & ~0x00000008);
21162          realUser_ = getDefaultInstance().getRealUser();
21163          onChanged();
21164          return this;
21165        }
21166        /**
21167         * <code>optional string realUser = 4;</code>
21168         */
21169        public Builder setRealUserBytes(
21170            com.google.protobuf.ByteString value) {
          if (value == null) {
            throw new NullPointerException();
          }
          bitField0_ |= 0x00000008;
21175          realUser_ = value;
21176          onChanged();
21177          return this;
21178        }
21179
21180        // optional uint64 issueDate = 5;
21181        private long issueDate_ ;
21182        /**
21183         * <code>optional uint64 issueDate = 5;</code>
21184         */
21185        public boolean hasIssueDate() {
21186          return ((bitField0_ & 0x00000010) == 0x00000010);
21187        }
21188        /**
21189         * <code>optional uint64 issueDate = 5;</code>
21190         */
        public long getIssueDate() {
          return issueDate_;
        }
        /**
         * <code>optional uint64 issueDate = 5;</code>
         */
        public Builder setIssueDate(long value) {
          bitField0_ |= 0x00000010;
          issueDate_ = value;
          onChanged();
          return this;
        }
        /**
         * <code>optional uint64 issueDate = 5;</code>
         */
        public Builder clearIssueDate() {
          bitField0_ = (bitField0_ & ~0x00000010);
          issueDate_ = 0L;
          onChanged();
          return this;
        }

        // optional uint64 maxDate = 6;
        private long maxDate_ ;
        /**
         * <code>optional uint64 maxDate = 6;</code>
         */
        public boolean hasMaxDate() {
          return ((bitField0_ & 0x00000020) == 0x00000020);
        }
        /**
         * <code>optional uint64 maxDate = 6;</code>
         */
        public long getMaxDate() {
          return maxDate_;
        }
        /**
         * <code>optional uint64 maxDate = 6;</code>
         */
        public Builder setMaxDate(long value) {
          bitField0_ |= 0x00000020;
          maxDate_ = value;
          onChanged();
          return this;
        }
        /**
         * <code>optional uint64 maxDate = 6;</code>
         */
        public Builder clearMaxDate() {
          bitField0_ = (bitField0_ & ~0x00000020);
          maxDate_ = 0L;
          onChanged();
          return this;
        }

        // optional uint32 sequenceNumber = 7;
        private int sequenceNumber_ ;
        /**
         * <code>optional uint32 sequenceNumber = 7;</code>
         */
        public boolean hasSequenceNumber() {
          return ((bitField0_ & 0x00000040) == 0x00000040);
        }
        /**
         * <code>optional uint32 sequenceNumber = 7;</code>
         */
        public int getSequenceNumber() {
          return sequenceNumber_;
        }
        /**
         * <code>optional uint32 sequenceNumber = 7;</code>
         */
        public Builder setSequenceNumber(int value) {
          bitField0_ |= 0x00000040;
          sequenceNumber_ = value;
          onChanged();
          return this;
        }
        /**
         * <code>optional uint32 sequenceNumber = 7;</code>
         */
        public Builder clearSequenceNumber() {
          bitField0_ = (bitField0_ & ~0x00000040);
          sequenceNumber_ = 0;
          onChanged();
          return this;
        }

        // optional uint32 masterKeyId = 8;
        private int masterKeyId_ ;
        /**
         * <code>optional uint32 masterKeyId = 8;</code>
         */
        public boolean hasMasterKeyId() {
          return ((bitField0_ & 0x00000080) == 0x00000080);
        }
        /**
         * <code>optional uint32 masterKeyId = 8;</code>
         */
        public int getMasterKeyId() {
          return masterKeyId_;
        }
        /**
         * <code>optional uint32 masterKeyId = 8;</code>
         */
        public Builder setMasterKeyId(int value) {
          bitField0_ |= 0x00000080;
          masterKeyId_ = value;
          onChanged();
          return this;
        }
        /**
         * <code>optional uint32 masterKeyId = 8;</code>
         */
        public Builder clearMasterKeyId() {
          bitField0_ = (bitField0_ & ~0x00000080);
          masterKeyId_ = 0;
          onChanged();
          return this;
        }

        // optional uint64 expiryDate = 9;
        private long expiryDate_ ;
        /**
         * <code>optional uint64 expiryDate = 9;</code>
         */
        public boolean hasExpiryDate() {
          return ((bitField0_ & 0x00000100) == 0x00000100);
        }
        /**
         * <code>optional uint64 expiryDate = 9;</code>
         */
        public long getExpiryDate() {
          return expiryDate_;
        }
        /**
         * <code>optional uint64 expiryDate = 9;</code>
         */
        public Builder setExpiryDate(long value) {
          bitField0_ |= 0x00000100;
          expiryDate_ = value;
          onChanged();
          return this;
        }
        /**
         * <code>optional uint64 expiryDate = 9;</code>
         */
        public Builder clearExpiryDate() {
          bitField0_ = (bitField0_ & ~0x00000100);
          expiryDate_ = 0L;
          onChanged();
          return this;
        }

        // @@protoc_insertion_point(builder_scope:hadoop.hdfs.fsimage.SecretManagerSection.PersistToken)
      }

      static {
        defaultInstance = new PersistToken(true);
        defaultInstance.initFields();
      }

      // @@protoc_insertion_point(class_scope:hadoop.hdfs.fsimage.SecretManagerSection.PersistToken)
    }
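    /*
     * Illustrative usage (not part of the generated API): a PersistToken
     * entry can be assembled through its Builder; the date values below are
     * hypothetical epoch-millisecond timestamps.
     *
     *   SecretManagerSection.PersistToken token =
     *       SecretManagerSection.PersistToken.newBuilder()
     *           .setIssueDate(1700000000000L)   // hypothetical issue time
     *           .setMaxDate(1702592000000L)     // hypothetical max lifetime
     *           .setSequenceNumber(42)
     *           .setMasterKeyId(7)
     *           .setExpiryDate(1700086400000L)  // hypothetical expiry
     *           .build();
     */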

    private int bitField0_;
    // optional uint32 currentId = 1;
    public static final int CURRENTID_FIELD_NUMBER = 1;
    private int currentId_;
    /**
     * <code>optional uint32 currentId = 1;</code>
     */
    public boolean hasCurrentId() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>optional uint32 currentId = 1;</code>
     */
    public int getCurrentId() {
      return currentId_;
    }

    // optional uint32 tokenSequenceNumber = 2;
    public static final int TOKENSEQUENCENUMBER_FIELD_NUMBER = 2;
    private int tokenSequenceNumber_;
    /**
     * <code>optional uint32 tokenSequenceNumber = 2;</code>
     */
    public boolean hasTokenSequenceNumber() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>optional uint32 tokenSequenceNumber = 2;</code>
     */
    public int getTokenSequenceNumber() {
      return tokenSequenceNumber_;
    }

    // optional uint32 numKeys = 3;
    public static final int NUMKEYS_FIELD_NUMBER = 3;
    private int numKeys_;
    /**
     * <code>optional uint32 numKeys = 3;</code>
     */
    public boolean hasNumKeys() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    /**
     * <code>optional uint32 numKeys = 3;</code>
     */
    public int getNumKeys() {
      return numKeys_;
    }

    // optional uint32 numTokens = 4;
    public static final int NUMTOKENS_FIELD_NUMBER = 4;
    private int numTokens_;
    /**
     * <code>optional uint32 numTokens = 4;</code>
     *
     * <pre>
     * repeated DelegationKey keys
     * repeated PersistToken tokens
     * </pre>
     */
    public boolean hasNumTokens() {
      return ((bitField0_ & 0x00000008) == 0x00000008);
    }
    /**
     * <code>optional uint32 numTokens = 4;</code>
     *
     * <pre>
     * repeated DelegationKey keys
     * repeated PersistToken tokens
     * </pre>
     */
    public int getNumTokens() {
      return numTokens_;
    }

    private void initFields() {
      currentId_ = 0;
      tokenSequenceNumber_ = 0;
      numKeys_ = 0;
      numTokens_ = 0;
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      memoizedIsInitialized = 1;
      return true;
    }
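    /*
     * Note: isInitialized is memoized in a single byte (-1 unset, 0 false,
     * 1 true). Every field of SecretManagerSection is optional, so the check
     * trivially succeeds and caches 1 on first use.
     */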

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeUInt32(1, currentId_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeUInt32(2, tokenSequenceNumber_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeUInt32(3, numKeys_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        output.writeUInt32(4, numTokens_);
      }
      getUnknownFields().writeTo(output);
    }

    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt32Size(1, currentId_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt32Size(2, tokenSequenceNumber_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt32Size(3, numKeys_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt32Size(4, numTokens_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
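    /*
     * Presence-bit pattern: each optional field owns one bit of bitField0_,
     * and writeTo/getSerializedSize emit a field only when its bit is set,
     * so cleared fields cost zero bytes on the wire. A minimal sketch:
     *
     *   SecretManagerSection s =
     *       SecretManagerSection.newBuilder().setCurrentId(1).build();
     *   int n = s.getSerializedSize();  // counts only the currentId field
     */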

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
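    /*
     * Round-trip sketch using the overloads above (illustrative only;
     * "section" is a hypothetical instance):
     *
     *   com.google.protobuf.ByteString bytes = section.toByteString();
     *   SecretManagerSection copy = SecretManagerSection.parseFrom(bytes);
     */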

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.hdfs.fsimage.SecretManagerSection}
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSectionOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SecretManagerSection_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SecretManagerSection_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.Builder.class);
      }

      // Construct using org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        currentId_ = 0;
        bitField0_ = (bitField0_ & ~0x00000001);
        tokenSequenceNumber_ = 0;
        bitField0_ = (bitField0_ & ~0x00000002);
        numKeys_ = 0;
        bitField0_ = (bitField0_ & ~0x00000004);
        numTokens_ = 0;
        bitField0_ = (bitField0_ & ~0x00000008);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_SecretManagerSection_descriptor;
      }

      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection getDefaultInstanceForType() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.getDefaultInstance();
      }

      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection build() {
        org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection buildPartial() {
        org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection result = new org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.currentId_ = currentId_;
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.tokenSequenceNumber_ = tokenSequenceNumber_;
        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
          to_bitField0_ |= 0x00000004;
        }
        result.numKeys_ = numKeys_;
        if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
          to_bitField0_ |= 0x00000008;
        }
        result.numTokens_ = numTokens_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection) {
          return mergeFrom((org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection other) {
        if (other == org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection.getDefaultInstance()) return this;
        if (other.hasCurrentId()) {
          setCurrentId(other.getCurrentId());
        }
        if (other.hasTokenSequenceNumber()) {
          setTokenSequenceNumber(other.getTokenSequenceNumber());
        }
        if (other.hasNumKeys()) {
          setNumKeys(other.getNumKeys());
        }
        if (other.hasNumTokens()) {
          setNumTokens(other.getNumTokens());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
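      /*
       * mergeFrom copies only the fields the other message actually has set,
       * so it can overlay a partial update; an illustrative sketch:
       *
       *   Builder b = SecretManagerSection.newBuilder().setCurrentId(1);
       *   b.mergeFrom(SecretManagerSection.newBuilder()
       *       .setNumKeys(3).buildPartial());  // currentId remains 1
       */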

      public final boolean isInitialized() {
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;

      // optional uint32 currentId = 1;
      private int currentId_ ;
      /**
       * <code>optional uint32 currentId = 1;</code>
       */
      public boolean hasCurrentId() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>optional uint32 currentId = 1;</code>
       */
      public int getCurrentId() {
        return currentId_;
      }
      /**
       * <code>optional uint32 currentId = 1;</code>
       */
      public Builder setCurrentId(int value) {
        bitField0_ |= 0x00000001;
        currentId_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional uint32 currentId = 1;</code>
       */
      public Builder clearCurrentId() {
        bitField0_ = (bitField0_ & ~0x00000001);
        currentId_ = 0;
        onChanged();
        return this;
      }

      // optional uint32 tokenSequenceNumber = 2;
      private int tokenSequenceNumber_ ;
      /**
       * <code>optional uint32 tokenSequenceNumber = 2;</code>
       */
      public boolean hasTokenSequenceNumber() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>optional uint32 tokenSequenceNumber = 2;</code>
       */
      public int getTokenSequenceNumber() {
        return tokenSequenceNumber_;
      }
      /**
       * <code>optional uint32 tokenSequenceNumber = 2;</code>
       */
      public Builder setTokenSequenceNumber(int value) {
        bitField0_ |= 0x00000002;
        tokenSequenceNumber_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional uint32 tokenSequenceNumber = 2;</code>
       */
      public Builder clearTokenSequenceNumber() {
        bitField0_ = (bitField0_ & ~0x00000002);
        tokenSequenceNumber_ = 0;
        onChanged();
        return this;
      }

      // optional uint32 numKeys = 3;
      private int numKeys_ ;
      /**
       * <code>optional uint32 numKeys = 3;</code>
       */
      public boolean hasNumKeys() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      /**
       * <code>optional uint32 numKeys = 3;</code>
       */
      public int getNumKeys() {
        return numKeys_;
      }
      /**
       * <code>optional uint32 numKeys = 3;</code>
       */
      public Builder setNumKeys(int value) {
        bitField0_ |= 0x00000004;
        numKeys_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional uint32 numKeys = 3;</code>
       */
      public Builder clearNumKeys() {
        bitField0_ = (bitField0_ & ~0x00000004);
        numKeys_ = 0;
        onChanged();
        return this;
      }

      // optional uint32 numTokens = 4;
      private int numTokens_ ;
      /**
       * <code>optional uint32 numTokens = 4;</code>
       *
       * <pre>
       * repeated DelegationKey keys
       * repeated PersistToken tokens
       * </pre>
       */
      public boolean hasNumTokens() {
        return ((bitField0_ & 0x00000008) == 0x00000008);
      }
      /**
       * <code>optional uint32 numTokens = 4;</code>
       *
       * <pre>
       * repeated DelegationKey keys
       * repeated PersistToken tokens
       * </pre>
       */
      public int getNumTokens() {
        return numTokens_;
      }
      /**
       * <code>optional uint32 numTokens = 4;</code>
       *
       * <pre>
       * repeated DelegationKey keys
       * repeated PersistToken tokens
       * </pre>
       */
      public Builder setNumTokens(int value) {
        bitField0_ |= 0x00000008;
        numTokens_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional uint32 numTokens = 4;</code>
       *
       * <pre>
       * repeated DelegationKey keys
       * repeated PersistToken tokens
       * </pre>
       */
      public Builder clearNumTokens() {
        bitField0_ = (bitField0_ & ~0x00000008);
        numTokens_ = 0;
        onChanged();
        return this;
      }

      // @@protoc_insertion_point(builder_scope:hadoop.hdfs.fsimage.SecretManagerSection)
    }

    static {
      defaultInstance = new SecretManagerSection(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:hadoop.hdfs.fsimage.SecretManagerSection)
  }
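  /*
   * Example (illustrative, not generated): assembling the
   * SecretManagerSection header. Per the <pre> notes on numKeys/numTokens,
   * the DelegationKey and PersistToken entries themselves are written as
   * separate messages after this header in the image stream.
   *
   *   FsImageProto.SecretManagerSection section =
   *       FsImageProto.SecretManagerSection.newBuilder()
   *           .setCurrentId(5)
   *           .setTokenSequenceNumber(100)
   *           .setNumKeys(2)
   *           .setNumTokens(10)
   *           .build();
   */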

  public interface CacheManagerSectionOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required uint64 nextDirectiveId = 1;
    /**
     * <code>required uint64 nextDirectiveId = 1;</code>
     */
    boolean hasNextDirectiveId();
    /**
     * <code>required uint64 nextDirectiveId = 1;</code>
     */
    long getNextDirectiveId();

    // required uint32 numPools = 2;
    /**
     * <code>required uint32 numPools = 2;</code>
     */
    boolean hasNumPools();
    /**
     * <code>required uint32 numPools = 2;</code>
     */
    int getNumPools();

    // required uint32 numDirectives = 3;
    /**
     * <code>required uint32 numDirectives = 3;</code>
     *
     * <pre>
     * repeated CachePoolInfoProto pools
     * repeated CacheDirectiveInfoProto directives
     * </pre>
     */
    boolean hasNumDirectives();
    /**
     * <code>required uint32 numDirectives = 3;</code>
     *
     * <pre>
     * repeated CachePoolInfoProto pools
     * repeated CacheDirectiveInfoProto directives
     * </pre>
     */
    int getNumDirectives();
  }
  /**
   * Protobuf type {@code hadoop.hdfs.fsimage.CacheManagerSection}
   */
  public static final class CacheManagerSection extends
      com.google.protobuf.GeneratedMessage
      implements CacheManagerSectionOrBuilder {
    // Use CacheManagerSection.newBuilder() to construct.
    private CacheManagerSection(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    private CacheManagerSection(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final CacheManagerSection defaultInstance;
    public static CacheManagerSection getDefaultInstance() {
      return defaultInstance;
    }

    public CacheManagerSection getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    private CacheManagerSection(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 8: {
              bitField0_ |= 0x00000001;
              nextDirectiveId_ = input.readUInt64();
              break;
            }
            case 16: {
              bitField0_ |= 0x00000002;
              numPools_ = input.readUInt32();
              break;
            }
            case 24: {
              bitField0_ |= 0x00000004;
              numDirectives_ = input.readUInt32();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
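    /*
     * The parsing constructor above reads tags until end of stream (tag 0).
     * Tags 8, 16, and 24 are fields 1-3 encoded as varints
     * (tag = fieldNumber << 3 | wireType), and each sets its presence bit;
     * any other tag is preserved in the unknown-field set rather than
     * rejected.
     */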
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_CacheManagerSection_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_CacheManagerSection_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hdfs.server.namenode.FsImageProto.CacheManagerSection.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.CacheManagerSection.Builder.class);
    }

    public static com.google.protobuf.Parser<CacheManagerSection> PARSER =
        new com.google.protobuf.AbstractParser<CacheManagerSection>() {
      public CacheManagerSection parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new CacheManagerSection(input, extensionRegistry);
      }
    };
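    /*
     * PARSER delegates to the stream-parsing constructor above, and all of
     * the static parseFrom/parseDelimitedFrom entry points below funnel
     * through it. Illustrative direct use ("data" is a hypothetical
     * ByteString):
     *
     *   CacheManagerSection s = CacheManagerSection.PARSER.parseFrom(
     *       data,
     *       com.google.protobuf.ExtensionRegistryLite.getEmptyRegistry());
     */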

    @java.lang.Override
    public com.google.protobuf.Parser<CacheManagerSection> getParserForType() {
      return PARSER;
    }

    private int bitField0_;
    // required uint64 nextDirectiveId = 1;
    public static final int NEXTDIRECTIVEID_FIELD_NUMBER = 1;
    private long nextDirectiveId_;
    /**
     * <code>required uint64 nextDirectiveId = 1;</code>
     */
    public boolean hasNextDirectiveId() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required uint64 nextDirectiveId = 1;</code>
     */
    public long getNextDirectiveId() {
      return nextDirectiveId_;
    }

    // required uint32 numPools = 2;
    public static final int NUMPOOLS_FIELD_NUMBER = 2;
    private int numPools_;
    /**
     * <code>required uint32 numPools = 2;</code>
     */
    public boolean hasNumPools() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>required uint32 numPools = 2;</code>
     */
    public int getNumPools() {
      return numPools_;
    }

    // required uint32 numDirectives = 3;
    public static final int NUMDIRECTIVES_FIELD_NUMBER = 3;
    private int numDirectives_;
    /**
     * <code>required uint32 numDirectives = 3;</code>
     *
     * <pre>
     * repeated CachePoolInfoProto pools
     * repeated CacheDirectiveInfoProto directives
     * </pre>
     */
    public boolean hasNumDirectives() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    /**
     * <code>required uint32 numDirectives = 3;</code>
     *
     * <pre>
     * repeated CachePoolInfoProto pools
     * repeated CacheDirectiveInfoProto directives
     * </pre>
     */
    public int getNumDirectives() {
      return numDirectives_;
    }

    private void initFields() {
      nextDirectiveId_ = 0L;
      numPools_ = 0;
      numDirectives_ = 0;
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      if (!hasNextDirectiveId()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasNumPools()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasNumDirectives()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }
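    /*
     * Unlike SecretManagerSection, every field here is required, so build()
     * on a Builder missing nextDirectiveId, numPools, or numDirectives
     * throws an UninitializedMessageException (buildPartial() skips the
     * check). Illustrative:
     *
     *   CacheManagerSection.newBuilder().setNumPools(1).build();  // throws
     */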

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeUInt64(1, nextDirectiveId_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeUInt32(2, numPools_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeUInt32(3, numDirectives_);
      }
      getUnknownFields().writeTo(output);
    }

    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(1, nextDirectiveId_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt32Size(2, numPools_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt32Size(3, numDirectives_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.CacheManagerSection parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.CacheManagerSection parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.CacheManagerSection parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.CacheManagerSection parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.CacheManagerSection parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.CacheManagerSection parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.CacheManagerSection parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.CacheManagerSection parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.CacheManagerSection parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hdfs.server.namenode.FsImageProto.CacheManagerSection parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hdfs.server.namenode.FsImageProto.CacheManagerSection prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.hdfs.fsimage.CacheManagerSection}
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hdfs.server.namenode.FsImageProto.CacheManagerSectionOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_CacheManagerSection_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_CacheManagerSection_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hdfs.server.namenode.FsImageProto.CacheManagerSection.class, org.apache.hadoop.hdfs.server.namenode.FsImageProto.CacheManagerSection.Builder.class);
      }

      // Construct using org.apache.hadoop.hdfs.server.namenode.FsImageProto.CacheManagerSection.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        nextDirectiveId_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000001);
        numPools_ = 0;
        bitField0_ = (bitField0_ & ~0x00000002);
        numDirectives_ = 0;
        bitField0_ = (bitField0_ & ~0x00000004);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.internal_static_hadoop_hdfs_fsimage_CacheManagerSection_descriptor;
      }

      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.CacheManagerSection getDefaultInstanceForType() {
        return org.apache.hadoop.hdfs.server.namenode.FsImageProto.CacheManagerSection.getDefaultInstance();
      }

      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.CacheManagerSection build() {
        org.apache.hadoop.hdfs.server.namenode.FsImageProto.CacheManagerSection result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      public org.apache.hadoop.hdfs.server.namenode.FsImageProto.CacheManagerSection buildPartial() {
        org.apache.hadoop.hdfs.server.namenode.FsImageProto.CacheManagerSection result = new org.apache.hadoop.hdfs.server.namenode.FsImageProto.CacheManagerSection(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.nextDirectiveId_ = nextDirectiveId_;
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.numPools_ = numPools_;
        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
          to_bitField0_ |= 0x00000004;
        }
        result.numDirectives_ = numDirectives_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hdfs.server.namenode.FsImageProto.CacheManagerSection) {
          return mergeFrom((org.apache.hadoop.hdfs.server.namenode.FsImageProto.CacheManagerSection)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.hdfs.server.namenode.FsImageProto.CacheManagerSection other) {
        if (other == org.apache.hadoop.hdfs.server.namenode.FsImageProto.CacheManagerSection.getDefaultInstance()) return this;
        if (other.hasNextDirectiveId()) {
          setNextDirectiveId(other.getNextDirectiveId());
        }
        if (other.hasNumPools()) {
          setNumPools(other.getNumPools());
        }
        if (other.hasNumDirectives()) {
          setNumDirectives(other.getNumDirectives());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        if (!hasNextDirectiveId()) {
          return false;
        }
        if (!hasNumPools()) {
          return false;
        }
        if (!hasNumDirectives()) {
          return false;
        }
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hdfs.server.namenode.FsImageProto.CacheManagerSection parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hdfs.server.namenode.FsImageProto.CacheManagerSection) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;

      // required uint64 nextDirectiveId = 1;
      private long nextDirectiveId_ ;
      /**
       * <code>required uint64 nextDirectiveId = 1;</code>
       */
      public boolean hasNextDirectiveId() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>required uint64 nextDirectiveId = 1;</code>
       */
      public long getNextDirectiveId() {
        return nextDirectiveId_;
      }
      /**
       * <code>required uint64 nextDirectiveId = 1;</code>
       */
      public Builder setNextDirectiveId(long value) {
        bitField0_ |= 0x00000001;
        nextDirectiveId_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>required uint64 nextDirectiveId = 1;</code>
       */
      public Builder clearNextDirectiveId() {
        bitField0_ = (bitField0_ & ~0x00000001);
        nextDirectiveId_ = 0L;
        onChanged();
        return this;
      }

      // required uint32 numPools = 2;
      private int numPools_ ;
      /**
       * <code>required uint32 numPools = 2;</code>
       */
      public boolean hasNumPools() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>required uint32 numPools = 2;</code>
       */
      public int getNumPools() {
        return numPools_;
      }
      /**
       * <code>required uint32 numPools = 2;</code>
       */
      public Builder setNumPools(int value) {
        bitField0_ |= 0x00000002;
        numPools_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>required uint32 numPools = 2;</code>
       */
      public Builder clearNumPools() {
        bitField0_ = (bitField0_ & ~0x00000002);
        numPools_ = 0;
        onChanged();
        return this;
      }

      // required uint32 numDirectives = 3;
      private int numDirectives_ ;
      /**
       * <code>required uint32 numDirectives = 3;</code>
       *
       * <pre>
       * repeated CachePoolInfoProto pools
       * repeated CacheDirectiveInfoProto directives
       * </pre>
       */
      public boolean hasNumDirectives() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      /**
       * <code>required uint32 numDirectives = 3;</code>
       *
       * <pre>
       * repeated CachePoolInfoProto pools
       * repeated CacheDirectiveInfoProto directives
       * </pre>
       */
      public int getNumDirectives() {
        return numDirectives_;
      }
      /**
       * <code>required uint32 numDirectives = 3;</code>
       *
       * <pre>
       * repeated CachePoolInfoProto pools
       * repeated CacheDirectiveInfoProto directives
       * </pre>
       */
      public Builder setNumDirectives(int value) {
        bitField0_ |= 0x00000004;
        numDirectives_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>required uint32 numDirectives = 3;</code>
       *
       * <pre>
       * repeated CachePoolInfoProto pools
       * repeated CacheDirectiveInfoProto directives
       * </pre>
       */
      public Builder clearNumDirectives() {
        bitField0_ = (bitField0_ & ~0x00000004);
        numDirectives_ = 0;
        onChanged();
        return this;
      }

      // @@protoc_insertion_point(builder_scope:hadoop.hdfs.fsimage.CacheManagerSection)
    }

    static {
      defaultInstance = new CacheManagerSection(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:hadoop.hdfs.fsimage.CacheManagerSection)
  }
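  /*
   * Illustrative round trip for a CacheManagerSection (not part of the
   * generated API): writeDelimitedTo/parseDelimitedFrom pair a varint length
   * prefix with the message bytes, so consecutive records can share one
   * stream. The stream variables are hypothetical.
   *
   *   FsImageProto.CacheManagerSection out =
   *       FsImageProto.CacheManagerSection.newBuilder()
   *           .setNextDirectiveId(1L)
   *           .setNumPools(0)
   *           .setNumDirectives(0)
   *           .build();
   *   out.writeDelimitedTo(outputStream);
   *   FsImageProto.CacheManagerSection in =
   *       FsImageProto.CacheManagerSection.parseDelimitedFrom(inputStream);
   */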
22493
22494  private static com.google.protobuf.Descriptors.Descriptor
22495    internal_static_hadoop_hdfs_fsimage_FileSummary_descriptor;
22496  private static
22497    com.google.protobuf.GeneratedMessage.FieldAccessorTable
22498      internal_static_hadoop_hdfs_fsimage_FileSummary_fieldAccessorTable;
22499  private static com.google.protobuf.Descriptors.Descriptor
22500    internal_static_hadoop_hdfs_fsimage_FileSummary_Section_descriptor;
22501  private static
22502    com.google.protobuf.GeneratedMessage.FieldAccessorTable
22503      internal_static_hadoop_hdfs_fsimage_FileSummary_Section_fieldAccessorTable;
22504  private static com.google.protobuf.Descriptors.Descriptor
22505    internal_static_hadoop_hdfs_fsimage_NameSystemSection_descriptor;
22506  private static
22507    com.google.protobuf.GeneratedMessage.FieldAccessorTable
22508      internal_static_hadoop_hdfs_fsimage_NameSystemSection_fieldAccessorTable;
22509  private static com.google.protobuf.Descriptors.Descriptor
22510    internal_static_hadoop_hdfs_fsimage_INodeSection_descriptor;
22511  private static
22512    com.google.protobuf.GeneratedMessage.FieldAccessorTable
22513      internal_static_hadoop_hdfs_fsimage_INodeSection_fieldAccessorTable;
22514  private static com.google.protobuf.Descriptors.Descriptor
22515    internal_static_hadoop_hdfs_fsimage_INodeSection_FileUnderConstructionFeature_descriptor;
22516  private static
22517    com.google.protobuf.GeneratedMessage.FieldAccessorTable
22518      internal_static_hadoop_hdfs_fsimage_INodeSection_FileUnderConstructionFeature_fieldAccessorTable;
22519  private static com.google.protobuf.Descriptors.Descriptor
22520    internal_static_hadoop_hdfs_fsimage_INodeSection_AclFeatureProto_descriptor;
22521  private static
22522    com.google.protobuf.GeneratedMessage.FieldAccessorTable
22523      internal_static_hadoop_hdfs_fsimage_INodeSection_AclFeatureProto_fieldAccessorTable;
22524  private static com.google.protobuf.Descriptors.Descriptor
22525    internal_static_hadoop_hdfs_fsimage_INodeSection_XAttrCompactProto_descriptor;
22526  private static
22527    com.google.protobuf.GeneratedMessage.FieldAccessorTable
22528      internal_static_hadoop_hdfs_fsimage_INodeSection_XAttrCompactProto_fieldAccessorTable;
22529  private static com.google.protobuf.Descriptors.Descriptor
22530    internal_static_hadoop_hdfs_fsimage_INodeSection_XAttrFeatureProto_descriptor;
22531  private static
22532    com.google.protobuf.GeneratedMessage.FieldAccessorTable
22533      internal_static_hadoop_hdfs_fsimage_INodeSection_XAttrFeatureProto_fieldAccessorTable;
22534  private static com.google.protobuf.Descriptors.Descriptor
22535    internal_static_hadoop_hdfs_fsimage_INodeSection_INodeFile_descriptor;
22536  private static
22537    com.google.protobuf.GeneratedMessage.FieldAccessorTable
22538      internal_static_hadoop_hdfs_fsimage_INodeSection_INodeFile_fieldAccessorTable;
22539  private static com.google.protobuf.Descriptors.Descriptor
22540    internal_static_hadoop_hdfs_fsimage_INodeSection_INodeDirectory_descriptor;
22541  private static
22542    com.google.protobuf.GeneratedMessage.FieldAccessorTable
22543      internal_static_hadoop_hdfs_fsimage_INodeSection_INodeDirectory_fieldAccessorTable;
22544  private static com.google.protobuf.Descriptors.Descriptor
22545    internal_static_hadoop_hdfs_fsimage_INodeSection_INodeSymlink_descriptor;
22546  private static
22547    com.google.protobuf.GeneratedMessage.FieldAccessorTable
22548      internal_static_hadoop_hdfs_fsimage_INodeSection_INodeSymlink_fieldAccessorTable;
22549  private static com.google.protobuf.Descriptors.Descriptor
22550    internal_static_hadoop_hdfs_fsimage_INodeSection_INode_descriptor;
22551  private static
22552    com.google.protobuf.GeneratedMessage.FieldAccessorTable
22553      internal_static_hadoop_hdfs_fsimage_INodeSection_INode_fieldAccessorTable;
22554  private static com.google.protobuf.Descriptors.Descriptor
22555    internal_static_hadoop_hdfs_fsimage_FilesUnderConstructionSection_descriptor;
22556  private static
22557    com.google.protobuf.GeneratedMessage.FieldAccessorTable
22558      internal_static_hadoop_hdfs_fsimage_FilesUnderConstructionSection_fieldAccessorTable;
22559  private static com.google.protobuf.Descriptors.Descriptor
22560    internal_static_hadoop_hdfs_fsimage_FilesUnderConstructionSection_FileUnderConstructionEntry_descriptor;
22561  private static
22562    com.google.protobuf.GeneratedMessage.FieldAccessorTable
22563      internal_static_hadoop_hdfs_fsimage_FilesUnderConstructionSection_FileUnderConstructionEntry_fieldAccessorTable;
22564  private static com.google.protobuf.Descriptors.Descriptor
22565    internal_static_hadoop_hdfs_fsimage_INodeDirectorySection_descriptor;
22566  private static
22567    com.google.protobuf.GeneratedMessage.FieldAccessorTable
22568      internal_static_hadoop_hdfs_fsimage_INodeDirectorySection_fieldAccessorTable;
22569  private static com.google.protobuf.Descriptors.Descriptor
22570    internal_static_hadoop_hdfs_fsimage_INodeDirectorySection_DirEntry_descriptor;
22571  private static
22572    com.google.protobuf.GeneratedMessage.FieldAccessorTable
22573      internal_static_hadoop_hdfs_fsimage_INodeDirectorySection_DirEntry_fieldAccessorTable;
22574  private static com.google.protobuf.Descriptors.Descriptor
22575    internal_static_hadoop_hdfs_fsimage_INodeReferenceSection_descriptor;
22576  private static
22577    com.google.protobuf.GeneratedMessage.FieldAccessorTable
22578      internal_static_hadoop_hdfs_fsimage_INodeReferenceSection_fieldAccessorTable;
22579  private static com.google.protobuf.Descriptors.Descriptor
22580    internal_static_hadoop_hdfs_fsimage_INodeReferenceSection_INodeReference_descriptor;
22581  private static
22582    com.google.protobuf.GeneratedMessage.FieldAccessorTable
22583      internal_static_hadoop_hdfs_fsimage_INodeReferenceSection_INodeReference_fieldAccessorTable;
22584  private static com.google.protobuf.Descriptors.Descriptor
22585    internal_static_hadoop_hdfs_fsimage_SnapshotSection_descriptor;
22586  private static
22587    com.google.protobuf.GeneratedMessage.FieldAccessorTable
22588      internal_static_hadoop_hdfs_fsimage_SnapshotSection_fieldAccessorTable;
22589  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_hdfs_fsimage_SnapshotSection_Snapshot_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_hdfs_fsimage_SnapshotSection_Snapshot_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_CreatedListEntry_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_CreatedListEntry_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_DirectoryDiff_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_DirectoryDiff_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_FileDiff_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_FileDiff_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_DiffEntry_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_DiffEntry_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_hdfs_fsimage_StringTableSection_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_hdfs_fsimage_StringTableSection_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_hdfs_fsimage_StringTableSection_Entry_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_hdfs_fsimage_StringTableSection_Entry_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_hdfs_fsimage_SecretManagerSection_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_hdfs_fsimage_SecretManagerSection_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_hdfs_fsimage_SecretManagerSection_DelegationKey_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_hdfs_fsimage_SecretManagerSection_DelegationKey_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_hdfs_fsimage_SecretManagerSection_PersistToken_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_hdfs_fsimage_SecretManagerSection_PersistToken_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_hdfs_fsimage_CacheManagerSection_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_hdfs_fsimage_CacheManagerSection_fieldAccessorTable;

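  // Note: getDescriptor() below returns the file-level descriptor for
  // fsimage.proto. The paired *_descriptor / *_fieldAccessorTable statics
  // above are populated once by the static initializer at the end of this
  // class; each accessor table maps camel-cased field names to the generated
  // getters and setters so GeneratedMessage can reach them reflectively.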
  public static com.google.protobuf.Descriptors.FileDescriptor
      getDescriptor() {
    return descriptor;
  }
  private static com.google.protobuf.Descriptors.FileDescriptor
      descriptor;
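  // The initializer below embeds the serialized FileDescriptorProto of
  // fsimage.proto as string literals, rebuilds the FileDescriptor from those
  // bytes at class-load time, and then runs an assigner callback to cache
  // the per-message descriptors declared above.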
  static {
    java.lang.String[] descriptorData = {
      "\n\rfsimage.proto\022\023hadoop.hdfs.fsimage\032\nhd" +
      "fs.proto\032\tacl.proto\032\013xattr.proto\"\277\001\n\013Fil" +
      "eSummary\022\025\n\rondiskVersion\030\001 \002(\r\022\025\n\rlayou" +
      "tVersion\030\002 \002(\r\022\r\n\005codec\030\003 \001(\t\022:\n\010section" +
      "s\030\004 \003(\0132(.hadoop.hdfs.fsimage.FileSummar" +
      "y.Section\0327\n\007Section\022\014\n\004name\030\001 \001(\t\022\016\n\006le" +
      "ngth\030\002 \001(\004\022\016\n\006offset\030\003 \001(\004\"\277\001\n\021NameSyste" +
      "mSection\022\023\n\013namespaceId\030\001 \001(\r\022\022\n\ngenstam" +
      "pV1\030\002 \001(\004\022\022\n\ngenstampV2\030\003 \001(\004\022\027\n\017genstam" +
      "pV1Limit\030\004 \001(\004\022\034\n\024lastAllocatedBlockId\030\005",
      " \001(\004\022\025\n\rtransactionId\030\006 \001(\004\022\037\n\027rollingUp" +
      "gradeStartTime\030\007 \001(\004\"\315\n\n\014INodeSection\022\023\n" +
      "\013lastInodeId\030\001 \001(\004\022\021\n\tnumInodes\030\002 \001(\004\032I\n" +
      "\034FileUnderConstructionFeature\022\022\n\nclientN" +
      "ame\030\001 \001(\t\022\025\n\rclientMachine\030\002 \001(\t\032&\n\017AclF" +
      "eatureProto\022\023\n\007entries\030\002 \003(\007B\002\020\001\0320\n\021XAtt" +
      "rCompactProto\022\014\n\004name\030\001 \002(\007\022\r\n\005value\030\002 \001" +
      "(\014\032X\n\021XAttrFeatureProto\022C\n\006xAttrs\030\001 \003(\0132" +
      "3.hadoop.hdfs.fsimage.INodeSection.XAttr" +
      "CompactProto\032\374\002\n\tINodeFile\022\023\n\013replicatio",
      "n\030\001 \001(\r\022\030\n\020modificationTime\030\002 \001(\004\022\022\n\nacc" +
      "essTime\030\003 \001(\004\022\032\n\022preferredBlockSize\030\004 \001(" +
      "\004\022\022\n\npermission\030\005 \001(\006\022\'\n\006blocks\030\006 \003(\0132\027." +
      "hadoop.hdfs.BlockProto\022N\n\006fileUC\030\007 \001(\0132>" +
      ".hadoop.hdfs.fsimage.INodeSection.FileUn" +
      "derConstructionFeature\022>\n\003acl\030\010 \001(\01321.ha" +
      "doop.hdfs.fsimage.INodeSection.AclFeatur" +
      "eProto\022C\n\006xAttrs\030\t \001(\01323.hadoop.hdfs.fsi" +
      "mage.INodeSection.XAttrFeatureProto\032\345\001\n\016" +
      "INodeDirectory\022\030\n\020modificationTime\030\001 \001(\004",
      "\022\017\n\007nsQuota\030\002 \001(\004\022\017\n\007dsQuota\030\003 \001(\004\022\022\n\npe" +
      "rmission\030\004 \001(\006\022>\n\003acl\030\005 \001(\01321.hadoop.hdf" +
      "s.fsimage.INodeSection.AclFeatureProto\022C" +
      "\n\006xAttrs\030\006 \001(\01323.hadoop.hdfs.fsimage.INo" +
      "deSection.XAttrFeatureProto\032`\n\014INodeSyml" +
      "ink\022\022\n\npermission\030\001 \001(\006\022\016\n\006target\030\002 \001(\014\022" +
      "\030\n\020modificationTime\030\003 \001(\004\022\022\n\naccessTime\030" +
      "\004 \001(\004\032\314\002\n\005INode\022:\n\004type\030\001 \002(\0162,.hadoop.h" +
      "dfs.fsimage.INodeSection.INode.Type\022\n\n\002i" +
      "d\030\002 \002(\004\022\014\n\004name\030\003 \001(\014\0229\n\004file\030\004 \001(\0132+.ha",
      "doop.hdfs.fsimage.INodeSection.INodeFile" +
      "\022C\n\tdirectory\030\005 \001(\01320.hadoop.hdfs.fsimag" +
      "e.INodeSection.INodeDirectory\022?\n\007symlink" +
      "\030\006 \001(\0132..hadoop.hdfs.fsimage.INodeSectio" +
      "n.INodeSymlink\",\n\004Type\022\010\n\004FILE\020\001\022\r\n\tDIRE" +
      "CTORY\020\002\022\013\n\007SYMLINK\020\003\"`\n\035FilesUnderConstr" +
      "uctionSection\032?\n\032FileUnderConstructionEn" +
      "try\022\017\n\007inodeId\030\001 \001(\004\022\020\n\010fullPath\030\002 \001(\t\"b" +
      "\n\025INodeDirectorySection\032I\n\010DirEntry\022\016\n\006p" +
      "arent\030\001 \001(\004\022\024\n\010children\030\002 \003(\004B\002\020\001\022\027\n\013ref",
      "Children\030\003 \003(\rB\002\020\001\"z\n\025INodeReferenceSect" +
      "ion\032a\n\016INodeReference\022\022\n\nreferredId\030\001 \001(" +
      "\004\022\014\n\004name\030\002 \001(\014\022\025\n\rdstSnapshotId\030\003 \001(\r\022\026" +
      "\n\016lastSnapshotId\030\004 \001(\r\"\265\001\n\017SnapshotSecti" +
      "on\022\027\n\017snapshotCounter\030\001 \001(\r\022\034\n\020snapshott" +
      "ableDir\030\002 \003(\004B\002\020\001\022\024\n\014numSnapshots\030\003 \001(\r\032" +
      "U\n\010Snapshot\022\022\n\nsnapshotId\030\001 \001(\r\0225\n\004root\030" +
      "\002 \001(\0132\'.hadoop.hdfs.fsimage.INodeSection" +
      ".INode\"\327\004\n\023SnapshotDiffSection\032 \n\020Create" +
      "dListEntry\022\014\n\004name\030\001 \001(\014\032\367\001\n\rDirectoryDi",
      "ff\022\022\n\nsnapshotId\030\001 \001(\r\022\024\n\014childrenSize\030\002" +
      " \001(\r\022\026\n\016isSnapshotRoot\030\003 \001(\010\022\014\n\004name\030\004 \001" +
      "(\014\022F\n\014snapshotCopy\030\005 \001(\01320.hadoop.hdfs.f" +
      "simage.INodeSection.INodeDirectory\022\027\n\017cr" +
      "eatedListSize\030\006 \001(\r\022\030\n\014deletedINode\030\007 \003(" +
      "\004B\002\020\001\022\033\n\017deletedINodeRef\030\010 \003(\rB\002\020\001\032\201\001\n\010F" +
      "ileDiff\022\022\n\nsnapshotId\030\001 \001(\r\022\020\n\010fileSize\030" +
      "\002 \001(\004\022\014\n\004name\030\003 \001(\014\022A\n\014snapshotCopy\030\004 \001(" +
      "\0132+.hadoop.hdfs.fsimage.INodeSection.INo" +
      "deFile\032\237\001\n\tDiffEntry\022E\n\004type\030\001 \002(\01627.had",
      "oop.hdfs.fsimage.SnapshotDiffSection.Dif" +
      "fEntry.Type\022\017\n\007inodeId\030\002 \001(\004\022\021\n\tnumOfDif" +
      "f\030\003 \001(\r\"\'\n\004Type\022\014\n\010FILEDIFF\020\001\022\021\n\rDIRECTO" +
      "RYDIFF\020\002\"H\n\022StringTableSection\022\020\n\010numEnt" +
      "ry\030\001 \001(\r\032 \n\005Entry\022\n\n\002id\030\001 \001(\r\022\013\n\003str\030\002 \001" +
      "(\t\"\341\002\n\024SecretManagerSection\022\021\n\tcurrentId" +
      "\030\001 \001(\r\022\033\n\023tokenSequenceNumber\030\002 \001(\r\022\017\n\007n" +
      "umKeys\030\003 \001(\r\022\021\n\tnumTokens\030\004 \001(\r\032<\n\rDeleg" +
      "ationKey\022\n\n\002id\030\001 \001(\r\022\022\n\nexpiryDate\030\002 \001(\004" +
      "\022\013\n\003key\030\003 \001(\014\032\266\001\n\014PersistToken\022\017\n\007versio",
      "n\030\001 \001(\r\022\r\n\005owner\030\002 \001(\t\022\017\n\007renewer\030\003 \001(\t\022" +
      "\020\n\010realUser\030\004 \001(\t\022\021\n\tissueDate\030\005 \001(\004\022\017\n\007" +
      "maxDate\030\006 \001(\004\022\026\n\016sequenceNumber\030\007 \001(\r\022\023\n" +
      "\013masterKeyId\030\010 \001(\r\022\022\n\nexpiryDate\030\t \001(\004\"W" +
      "\n\023CacheManagerSection\022\027\n\017nextDirectiveId" +
      "\030\001 \002(\004\022\020\n\010numPools\030\002 \002(\r\022\025\n\rnumDirective" +
      "s\030\003 \002(\rB6\n&org.apache.hadoop.hdfs.server" +
      ".namenodeB\014FsImageProto"
    };
    com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
      new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
        public com.google.protobuf.ExtensionRegistry assignDescriptors(
            com.google.protobuf.Descriptors.FileDescriptor root) {
          descriptor = root;
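          // One repeating pattern follows: look up each message's Descriptor
          // by its declaration index in fsimage.proto (top-level types via
          // getMessageTypes().get(i), nested types via getNestedTypes()),
          // then build a FieldAccessorTable whose String[] holds the
          // camel-cased field names used to resolve the generated accessors.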
          internal_static_hadoop_hdfs_fsimage_FileSummary_descriptor =
            getDescriptor().getMessageTypes().get(0);
          internal_static_hadoop_hdfs_fsimage_FileSummary_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_hdfs_fsimage_FileSummary_descriptor,
              new java.lang.String[] { "OndiskVersion", "LayoutVersion", "Codec", "Sections", });
          internal_static_hadoop_hdfs_fsimage_FileSummary_Section_descriptor =
            internal_static_hadoop_hdfs_fsimage_FileSummary_descriptor.getNestedTypes().get(0);
          internal_static_hadoop_hdfs_fsimage_FileSummary_Section_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_hdfs_fsimage_FileSummary_Section_descriptor,
              new java.lang.String[] { "Name", "Length", "Offset", });
          internal_static_hadoop_hdfs_fsimage_NameSystemSection_descriptor =
            getDescriptor().getMessageTypes().get(1);
          internal_static_hadoop_hdfs_fsimage_NameSystemSection_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_hdfs_fsimage_NameSystemSection_descriptor,
              new java.lang.String[] { "NamespaceId", "GenstampV1", "GenstampV2", "GenstampV1Limit", "LastAllocatedBlockId", "TransactionId", "RollingUpgradeStartTime", });
          internal_static_hadoop_hdfs_fsimage_INodeSection_descriptor =
            getDescriptor().getMessageTypes().get(2);
          internal_static_hadoop_hdfs_fsimage_INodeSection_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_hdfs_fsimage_INodeSection_descriptor,
              new java.lang.String[] { "LastInodeId", "NumInodes", });
          internal_static_hadoop_hdfs_fsimage_INodeSection_FileUnderConstructionFeature_descriptor =
            internal_static_hadoop_hdfs_fsimage_INodeSection_descriptor.getNestedTypes().get(0);
          internal_static_hadoop_hdfs_fsimage_INodeSection_FileUnderConstructionFeature_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_hdfs_fsimage_INodeSection_FileUnderConstructionFeature_descriptor,
              new java.lang.String[] { "ClientName", "ClientMachine", });
          internal_static_hadoop_hdfs_fsimage_INodeSection_AclFeatureProto_descriptor =
            internal_static_hadoop_hdfs_fsimage_INodeSection_descriptor.getNestedTypes().get(1);
          internal_static_hadoop_hdfs_fsimage_INodeSection_AclFeatureProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_hdfs_fsimage_INodeSection_AclFeatureProto_descriptor,
              new java.lang.String[] { "Entries", });
          internal_static_hadoop_hdfs_fsimage_INodeSection_XAttrCompactProto_descriptor =
            internal_static_hadoop_hdfs_fsimage_INodeSection_descriptor.getNestedTypes().get(2);
          internal_static_hadoop_hdfs_fsimage_INodeSection_XAttrCompactProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_hdfs_fsimage_INodeSection_XAttrCompactProto_descriptor,
              new java.lang.String[] { "Name", "Value", });
          internal_static_hadoop_hdfs_fsimage_INodeSection_XAttrFeatureProto_descriptor =
            internal_static_hadoop_hdfs_fsimage_INodeSection_descriptor.getNestedTypes().get(3);
          internal_static_hadoop_hdfs_fsimage_INodeSection_XAttrFeatureProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_hdfs_fsimage_INodeSection_XAttrFeatureProto_descriptor,
              new java.lang.String[] { "XAttrs", });
          internal_static_hadoop_hdfs_fsimage_INodeSection_INodeFile_descriptor =
            internal_static_hadoop_hdfs_fsimage_INodeSection_descriptor.getNestedTypes().get(4);
          internal_static_hadoop_hdfs_fsimage_INodeSection_INodeFile_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_hdfs_fsimage_INodeSection_INodeFile_descriptor,
              new java.lang.String[] { "Replication", "ModificationTime", "AccessTime", "PreferredBlockSize", "Permission", "Blocks", "FileUC", "Acl", "XAttrs", });
          internal_static_hadoop_hdfs_fsimage_INodeSection_INodeDirectory_descriptor =
            internal_static_hadoop_hdfs_fsimage_INodeSection_descriptor.getNestedTypes().get(5);
          internal_static_hadoop_hdfs_fsimage_INodeSection_INodeDirectory_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_hdfs_fsimage_INodeSection_INodeDirectory_descriptor,
              new java.lang.String[] { "ModificationTime", "NsQuota", "DsQuota", "Permission", "Acl", "XAttrs", });
          internal_static_hadoop_hdfs_fsimage_INodeSection_INodeSymlink_descriptor =
            internal_static_hadoop_hdfs_fsimage_INodeSection_descriptor.getNestedTypes().get(6);
          internal_static_hadoop_hdfs_fsimage_INodeSection_INodeSymlink_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_hdfs_fsimage_INodeSection_INodeSymlink_descriptor,
              new java.lang.String[] { "Permission", "Target", "ModificationTime", "AccessTime", });
          internal_static_hadoop_hdfs_fsimage_INodeSection_INode_descriptor =
            internal_static_hadoop_hdfs_fsimage_INodeSection_descriptor.getNestedTypes().get(7);
          internal_static_hadoop_hdfs_fsimage_INodeSection_INode_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_hdfs_fsimage_INodeSection_INode_descriptor,
              new java.lang.String[] { "Type", "Id", "Name", "File", "Directory", "Symlink", });
          internal_static_hadoop_hdfs_fsimage_FilesUnderConstructionSection_descriptor =
            getDescriptor().getMessageTypes().get(3);
          internal_static_hadoop_hdfs_fsimage_FilesUnderConstructionSection_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_hdfs_fsimage_FilesUnderConstructionSection_descriptor,
              new java.lang.String[] { });
          internal_static_hadoop_hdfs_fsimage_FilesUnderConstructionSection_FileUnderConstructionEntry_descriptor =
            internal_static_hadoop_hdfs_fsimage_FilesUnderConstructionSection_descriptor.getNestedTypes().get(0);
          internal_static_hadoop_hdfs_fsimage_FilesUnderConstructionSection_FileUnderConstructionEntry_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_hdfs_fsimage_FilesUnderConstructionSection_FileUnderConstructionEntry_descriptor,
              new java.lang.String[] { "InodeId", "FullPath", });
          internal_static_hadoop_hdfs_fsimage_INodeDirectorySection_descriptor =
            getDescriptor().getMessageTypes().get(4);
          internal_static_hadoop_hdfs_fsimage_INodeDirectorySection_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_hdfs_fsimage_INodeDirectorySection_descriptor,
              new java.lang.String[] { });
          internal_static_hadoop_hdfs_fsimage_INodeDirectorySection_DirEntry_descriptor =
            internal_static_hadoop_hdfs_fsimage_INodeDirectorySection_descriptor.getNestedTypes().get(0);
          internal_static_hadoop_hdfs_fsimage_INodeDirectorySection_DirEntry_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_hdfs_fsimage_INodeDirectorySection_DirEntry_descriptor,
              new java.lang.String[] { "Parent", "Children", "RefChildren", });
          internal_static_hadoop_hdfs_fsimage_INodeReferenceSection_descriptor =
            getDescriptor().getMessageTypes().get(5);
          internal_static_hadoop_hdfs_fsimage_INodeReferenceSection_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_hdfs_fsimage_INodeReferenceSection_descriptor,
              new java.lang.String[] { });
          internal_static_hadoop_hdfs_fsimage_INodeReferenceSection_INodeReference_descriptor =
            internal_static_hadoop_hdfs_fsimage_INodeReferenceSection_descriptor.getNestedTypes().get(0);
          internal_static_hadoop_hdfs_fsimage_INodeReferenceSection_INodeReference_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_hdfs_fsimage_INodeReferenceSection_INodeReference_descriptor,
              new java.lang.String[] { "ReferredId", "Name", "DstSnapshotId", "LastSnapshotId", });
          internal_static_hadoop_hdfs_fsimage_SnapshotSection_descriptor =
            getDescriptor().getMessageTypes().get(6);
          internal_static_hadoop_hdfs_fsimage_SnapshotSection_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_hdfs_fsimage_SnapshotSection_descriptor,
              new java.lang.String[] { "SnapshotCounter", "SnapshottableDir", "NumSnapshots", });
          internal_static_hadoop_hdfs_fsimage_SnapshotSection_Snapshot_descriptor =
            internal_static_hadoop_hdfs_fsimage_SnapshotSection_descriptor.getNestedTypes().get(0);
          internal_static_hadoop_hdfs_fsimage_SnapshotSection_Snapshot_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_hdfs_fsimage_SnapshotSection_Snapshot_descriptor,
              new java.lang.String[] { "SnapshotId", "Root", });
          internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_descriptor =
            getDescriptor().getMessageTypes().get(7);
          internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_descriptor,
              new java.lang.String[] { });
          internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_CreatedListEntry_descriptor =
            internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_descriptor.getNestedTypes().get(0);
          internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_CreatedListEntry_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_CreatedListEntry_descriptor,
              new java.lang.String[] { "Name", });
          internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_DirectoryDiff_descriptor =
            internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_descriptor.getNestedTypes().get(1);
          internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_DirectoryDiff_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_DirectoryDiff_descriptor,
              new java.lang.String[] { "SnapshotId", "ChildrenSize", "IsSnapshotRoot", "Name", "SnapshotCopy", "CreatedListSize", "DeletedINode", "DeletedINodeRef", });
          internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_FileDiff_descriptor =
            internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_descriptor.getNestedTypes().get(2);
          internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_FileDiff_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_FileDiff_descriptor,
              new java.lang.String[] { "SnapshotId", "FileSize", "Name", "SnapshotCopy", });
          internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_DiffEntry_descriptor =
            internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_descriptor.getNestedTypes().get(3);
          internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_DiffEntry_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_hdfs_fsimage_SnapshotDiffSection_DiffEntry_descriptor,
              new java.lang.String[] { "Type", "InodeId", "NumOfDiff", });
          internal_static_hadoop_hdfs_fsimage_StringTableSection_descriptor =
            getDescriptor().getMessageTypes().get(8);
          internal_static_hadoop_hdfs_fsimage_StringTableSection_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_hdfs_fsimage_StringTableSection_descriptor,
              new java.lang.String[] { "NumEntry", });
          internal_static_hadoop_hdfs_fsimage_StringTableSection_Entry_descriptor =
            internal_static_hadoop_hdfs_fsimage_StringTableSection_descriptor.getNestedTypes().get(0);
          internal_static_hadoop_hdfs_fsimage_StringTableSection_Entry_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_hdfs_fsimage_StringTableSection_Entry_descriptor,
              new java.lang.String[] { "Id", "Str", });
          internal_static_hadoop_hdfs_fsimage_SecretManagerSection_descriptor =
            getDescriptor().getMessageTypes().get(9);
          internal_static_hadoop_hdfs_fsimage_SecretManagerSection_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_hdfs_fsimage_SecretManagerSection_descriptor,
              new java.lang.String[] { "CurrentId", "TokenSequenceNumber", "NumKeys", "NumTokens", });
          internal_static_hadoop_hdfs_fsimage_SecretManagerSection_DelegationKey_descriptor =
            internal_static_hadoop_hdfs_fsimage_SecretManagerSection_descriptor.getNestedTypes().get(0);
          internal_static_hadoop_hdfs_fsimage_SecretManagerSection_DelegationKey_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_hdfs_fsimage_SecretManagerSection_DelegationKey_descriptor,
              new java.lang.String[] { "Id", "ExpiryDate", "Key", });
          internal_static_hadoop_hdfs_fsimage_SecretManagerSection_PersistToken_descriptor =
            internal_static_hadoop_hdfs_fsimage_SecretManagerSection_descriptor.getNestedTypes().get(1);
          internal_static_hadoop_hdfs_fsimage_SecretManagerSection_PersistToken_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_hdfs_fsimage_SecretManagerSection_PersistToken_descriptor,
              new java.lang.String[] { "Version", "Owner", "Renewer", "RealUser", "IssueDate", "MaxDate", "SequenceNumber", "MasterKeyId", "ExpiryDate", });
          internal_static_hadoop_hdfs_fsimage_CacheManagerSection_descriptor =
            getDescriptor().getMessageTypes().get(10);
          internal_static_hadoop_hdfs_fsimage_CacheManagerSection_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_hdfs_fsimage_CacheManagerSection_descriptor,
              new java.lang.String[] { "NextDirectiveId", "NumPools", "NumDirectives", });
          return null;
        }
      };
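    // internalBuildGeneratedFileFrom parses descriptorData, cross-links the
    // result against the descriptors of the three imported files (hdfs.proto,
    // acl.proto, xattr.proto), and then invokes the assigner above; returning
    // null from assignDescriptors indicates that no extension registry is
    // needed when interpreting the descriptor's options.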
    com.google.protobuf.Descriptors.FileDescriptor
      .internalBuildGeneratedFileFrom(descriptorData,
        new com.google.protobuf.Descriptors.FileDescriptor[] {
          org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.getDescriptor(),
          org.apache.hadoop.hdfs.protocol.proto.AclProtos.getDescriptor(),
          org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.getDescriptor(),
        }, assigner);
  }
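  // Illustrative usage sketch only (not generated code); the field values
  // here are invented. FileSummary's two required fields must be set before
  // build() will succeed:
  //
  //   FsImageProto.FileSummary summary = FsImageProto.FileSummary.newBuilder()
  //       .setOndiskVersion(1)
  //       .setLayoutVersion(1)
  //       .build();
  //   summary.writeDelimitedTo(out);                        // java.io.OutputStream
  //   FsImageProto.FileSummary parsed =
  //       FsImageProto.FileSummary.parseDelimitedFrom(in);  // java.io.InputStream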

  // @@protoc_insertion_point(outer_class_scope)
}