001// Generated by the protocol buffer compiler.  DO NOT EDIT!
002// source: RpcHeader.proto
003
004package org.apache.hadoop.ipc.protobuf;
005
006public final class RpcHeaderProtos {
  private RpcHeaderProtos() {}  // no instances: this class only namespaces the generated types
  /**
   * Hook for registering protobuf extensions declared in RpcHeader.proto.
   * The generated body is empty: no extensions are registered here.
   */
  public static void registerAllExtensions(
      com.google.protobuf.ExtensionRegistry registry) {
  }
011  /**
012   * Protobuf enum {@code hadoop.common.RpcKindProto}
013   *
014   * <pre>
015   **
016   * RpcKind determine the rpcEngine and the serialization of the rpc request
017   * </pre>
018   */
019  public enum RpcKindProto
020      implements com.google.protobuf.ProtocolMessageEnum {
021    /**
022     * <code>RPC_BUILTIN = 0;</code>
023     *
024     * <pre>
025     * Used for built in calls by tests
026     * </pre>
027     */
028    RPC_BUILTIN(0, 0),
029    /**
030     * <code>RPC_WRITABLE = 1;</code>
031     *
032     * <pre>
033     * Use WritableRpcEngine 
034     * </pre>
035     */
036    RPC_WRITABLE(1, 1),
037    /**
038     * <code>RPC_PROTOCOL_BUFFER = 2;</code>
039     *
040     * <pre>
041     * Use ProtobufRpcEngine
042     * </pre>
043     */
044    RPC_PROTOCOL_BUFFER(2, 2),
045    ;
046
047    /**
048     * <code>RPC_BUILTIN = 0;</code>
049     *
050     * <pre>
051     * Used for built in calls by tests
052     * </pre>
053     */
054    public static final int RPC_BUILTIN_VALUE = 0;
055    /**
056     * <code>RPC_WRITABLE = 1;</code>
057     *
058     * <pre>
059     * Use WritableRpcEngine 
060     * </pre>
061     */
062    public static final int RPC_WRITABLE_VALUE = 1;
063    /**
064     * <code>RPC_PROTOCOL_BUFFER = 2;</code>
065     *
066     * <pre>
067     * Use ProtobufRpcEngine
068     * </pre>
069     */
070    public static final int RPC_PROTOCOL_BUFFER_VALUE = 2;
071
072
073    public final int getNumber() { return value; }
074
075    public static RpcKindProto valueOf(int value) {
076      switch (value) {
077        case 0: return RPC_BUILTIN;
078        case 1: return RPC_WRITABLE;
079        case 2: return RPC_PROTOCOL_BUFFER;
080        default: return null;
081      }
082    }
083
084    public static com.google.protobuf.Internal.EnumLiteMap<RpcKindProto>
085        internalGetValueMap() {
086      return internalValueMap;
087    }
088    private static com.google.protobuf.Internal.EnumLiteMap<RpcKindProto>
089        internalValueMap =
090          new com.google.protobuf.Internal.EnumLiteMap<RpcKindProto>() {
091            public RpcKindProto findValueByNumber(int number) {
092              return RpcKindProto.valueOf(number);
093            }
094          };
095
096    public final com.google.protobuf.Descriptors.EnumValueDescriptor
097        getValueDescriptor() {
098      return getDescriptor().getValues().get(index);
099    }
100    public final com.google.protobuf.Descriptors.EnumDescriptor
101        getDescriptorForType() {
102      return getDescriptor();
103    }
104    public static final com.google.protobuf.Descriptors.EnumDescriptor
105        getDescriptor() {
106      return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.getDescriptor().getEnumTypes().get(0);
107    }
108
109    private static final RpcKindProto[] VALUES = values();
110
111    public static RpcKindProto valueOf(
112        com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
113      if (desc.getType() != getDescriptor()) {
114        throw new java.lang.IllegalArgumentException(
115          "EnumValueDescriptor is not for this type.");
116      }
117      return VALUES[desc.getIndex()];
118    }
119
120    private final int index;
121    private final int value;
122
123    private RpcKindProto(int index, int value) {
124      this.index = index;
125      this.value = value;
126    }
127
128    // @@protoc_insertion_point(enum_scope:hadoop.common.RpcKindProto)
129  }
130
  /**
   * Read-only accessor contract implemented by both {@code RPCTraceInfoProto}
   * and its {@code Builder}: a presence check plus a getter per field.
   */
  public interface RPCTraceInfoProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // optional int64 traceId = 1;
    /**
     * <code>optional int64 traceId = 1;</code>
     */
    boolean hasTraceId();
    /**
     * <code>optional int64 traceId = 1;</code>
     */
    long getTraceId();

    // optional int64 parentId = 2;
    /**
     * <code>optional int64 parentId = 2;</code>
     */
    boolean hasParentId();
    /**
     * <code>optional int64 parentId = 2;</code>
     */
    long getParentId();
  }
154  /**
155   * Protobuf type {@code hadoop.common.RPCTraceInfoProto}
156   *
157   * <pre>
158   **
159   * Used to pass through the information necessary to continue
160   * a trace after an RPC is made. All we need is the traceid
161   * (so we know the overarching trace this message is a part of), and
162   * the id of the current span when this message was sent, so we know
163   * what span caused the new span we will create when this message is received.
164   * </pre>
165   */
166  public static final class RPCTraceInfoProto extends
167      com.google.protobuf.GeneratedMessage
168      implements RPCTraceInfoProtoOrBuilder {
169    // Use RPCTraceInfoProto.newBuilder() to construct.
170    private RPCTraceInfoProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
171      super(builder);
172      this.unknownFields = builder.getUnknownFields();
173    }
174    private RPCTraceInfoProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
175
176    private static final RPCTraceInfoProto defaultInstance;
177    public static RPCTraceInfoProto getDefaultInstance() {
178      return defaultInstance;
179    }
180
181    public RPCTraceInfoProto getDefaultInstanceForType() {
182      return defaultInstance;
183    }
184
185    private final com.google.protobuf.UnknownFieldSet unknownFields;
186    @java.lang.Override
187    public final com.google.protobuf.UnknownFieldSet
188        getUnknownFields() {
189      return this.unknownFields;
190    }
191    private RPCTraceInfoProto(
192        com.google.protobuf.CodedInputStream input,
193        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
194        throws com.google.protobuf.InvalidProtocolBufferException {
195      initFields();
196      int mutable_bitField0_ = 0;
197      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
198          com.google.protobuf.UnknownFieldSet.newBuilder();
199      try {
200        boolean done = false;
201        while (!done) {
202          int tag = input.readTag();
203          switch (tag) {
204            case 0:
205              done = true;
206              break;
207            default: {
208              if (!parseUnknownField(input, unknownFields,
209                                     extensionRegistry, tag)) {
210                done = true;
211              }
212              break;
213            }
214            case 8: {
215              bitField0_ |= 0x00000001;
216              traceId_ = input.readInt64();
217              break;
218            }
219            case 16: {
220              bitField0_ |= 0x00000002;
221              parentId_ = input.readInt64();
222              break;
223            }
224          }
225        }
226      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
227        throw e.setUnfinishedMessage(this);
228      } catch (java.io.IOException e) {
229        throw new com.google.protobuf.InvalidProtocolBufferException(
230            e.getMessage()).setUnfinishedMessage(this);
231      } finally {
232        this.unknownFields = unknownFields.build();
233        makeExtensionsImmutable();
234      }
235    }
236    public static final com.google.protobuf.Descriptors.Descriptor
237        getDescriptor() {
238      return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RPCTraceInfoProto_descriptor;
239    }
240
241    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
242        internalGetFieldAccessorTable() {
243      return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RPCTraceInfoProto_fieldAccessorTable
244          .ensureFieldAccessorsInitialized(
245              org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto.class, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto.Builder.class);
246    }
247
248    public static com.google.protobuf.Parser<RPCTraceInfoProto> PARSER =
249        new com.google.protobuf.AbstractParser<RPCTraceInfoProto>() {
250      public RPCTraceInfoProto parsePartialFrom(
251          com.google.protobuf.CodedInputStream input,
252          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
253          throws com.google.protobuf.InvalidProtocolBufferException {
254        return new RPCTraceInfoProto(input, extensionRegistry);
255      }
256    };
257
258    @java.lang.Override
259    public com.google.protobuf.Parser<RPCTraceInfoProto> getParserForType() {
260      return PARSER;
261    }
262
263    private int bitField0_;
264    // optional int64 traceId = 1;
265    public static final int TRACEID_FIELD_NUMBER = 1;
266    private long traceId_;
267    /**
268     * <code>optional int64 traceId = 1;</code>
269     */
270    public boolean hasTraceId() {
271      return ((bitField0_ & 0x00000001) == 0x00000001);
272    }
273    /**
274     * <code>optional int64 traceId = 1;</code>
275     */
276    public long getTraceId() {
277      return traceId_;
278    }
279
280    // optional int64 parentId = 2;
281    public static final int PARENTID_FIELD_NUMBER = 2;
282    private long parentId_;
283    /**
284     * <code>optional int64 parentId = 2;</code>
285     */
286    public boolean hasParentId() {
287      return ((bitField0_ & 0x00000002) == 0x00000002);
288    }
289    /**
290     * <code>optional int64 parentId = 2;</code>
291     */
292    public long getParentId() {
293      return parentId_;
294    }
295
296    private void initFields() {
297      traceId_ = 0L;
298      parentId_ = 0L;
299    }
300    private byte memoizedIsInitialized = -1;
301    public final boolean isInitialized() {
302      byte isInitialized = memoizedIsInitialized;
303      if (isInitialized != -1) return isInitialized == 1;
304
305      memoizedIsInitialized = 1;
306      return true;
307    }
308
309    public void writeTo(com.google.protobuf.CodedOutputStream output)
310                        throws java.io.IOException {
311      getSerializedSize();
312      if (((bitField0_ & 0x00000001) == 0x00000001)) {
313        output.writeInt64(1, traceId_);
314      }
315      if (((bitField0_ & 0x00000002) == 0x00000002)) {
316        output.writeInt64(2, parentId_);
317      }
318      getUnknownFields().writeTo(output);
319    }
320
321    private int memoizedSerializedSize = -1;
322    public int getSerializedSize() {
323      int size = memoizedSerializedSize;
324      if (size != -1) return size;
325
326      size = 0;
327      if (((bitField0_ & 0x00000001) == 0x00000001)) {
328        size += com.google.protobuf.CodedOutputStream
329          .computeInt64Size(1, traceId_);
330      }
331      if (((bitField0_ & 0x00000002) == 0x00000002)) {
332        size += com.google.protobuf.CodedOutputStream
333          .computeInt64Size(2, parentId_);
334      }
335      size += getUnknownFields().getSerializedSize();
336      memoizedSerializedSize = size;
337      return size;
338    }
339
340    private static final long serialVersionUID = 0L;
341    @java.lang.Override
342    protected java.lang.Object writeReplace()
343        throws java.io.ObjectStreamException {
344      return super.writeReplace();
345    }
346
347    @java.lang.Override
348    public boolean equals(final java.lang.Object obj) {
349      if (obj == this) {
350       return true;
351      }
352      if (!(obj instanceof org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto)) {
353        return super.equals(obj);
354      }
355      org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto other = (org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto) obj;
356
357      boolean result = true;
358      result = result && (hasTraceId() == other.hasTraceId());
359      if (hasTraceId()) {
360        result = result && (getTraceId()
361            == other.getTraceId());
362      }
363      result = result && (hasParentId() == other.hasParentId());
364      if (hasParentId()) {
365        result = result && (getParentId()
366            == other.getParentId());
367      }
368      result = result &&
369          getUnknownFields().equals(other.getUnknownFields());
370      return result;
371    }
372
373    private int memoizedHashCode = 0;
374    @java.lang.Override
375    public int hashCode() {
376      if (memoizedHashCode != 0) {
377        return memoizedHashCode;
378      }
379      int hash = 41;
380      hash = (19 * hash) + getDescriptorForType().hashCode();
381      if (hasTraceId()) {
382        hash = (37 * hash) + TRACEID_FIELD_NUMBER;
383        hash = (53 * hash) + hashLong(getTraceId());
384      }
385      if (hasParentId()) {
386        hash = (37 * hash) + PARENTID_FIELD_NUMBER;
387        hash = (53 * hash) + hashLong(getParentId());
388      }
389      hash = (29 * hash) + getUnknownFields().hashCode();
390      memoizedHashCode = hash;
391      return hash;
392    }
393
394    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto parseFrom(
395        com.google.protobuf.ByteString data)
396        throws com.google.protobuf.InvalidProtocolBufferException {
397      return PARSER.parseFrom(data);
398    }
399    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto parseFrom(
400        com.google.protobuf.ByteString data,
401        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
402        throws com.google.protobuf.InvalidProtocolBufferException {
403      return PARSER.parseFrom(data, extensionRegistry);
404    }
405    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto parseFrom(byte[] data)
406        throws com.google.protobuf.InvalidProtocolBufferException {
407      return PARSER.parseFrom(data);
408    }
409    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto parseFrom(
410        byte[] data,
411        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
412        throws com.google.protobuf.InvalidProtocolBufferException {
413      return PARSER.parseFrom(data, extensionRegistry);
414    }
415    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto parseFrom(java.io.InputStream input)
416        throws java.io.IOException {
417      return PARSER.parseFrom(input);
418    }
419    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto parseFrom(
420        java.io.InputStream input,
421        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
422        throws java.io.IOException {
423      return PARSER.parseFrom(input, extensionRegistry);
424    }
425    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto parseDelimitedFrom(java.io.InputStream input)
426        throws java.io.IOException {
427      return PARSER.parseDelimitedFrom(input);
428    }
429    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto parseDelimitedFrom(
430        java.io.InputStream input,
431        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
432        throws java.io.IOException {
433      return PARSER.parseDelimitedFrom(input, extensionRegistry);
434    }
435    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto parseFrom(
436        com.google.protobuf.CodedInputStream input)
437        throws java.io.IOException {
438      return PARSER.parseFrom(input);
439    }
440    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto parseFrom(
441        com.google.protobuf.CodedInputStream input,
442        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
443        throws java.io.IOException {
444      return PARSER.parseFrom(input, extensionRegistry);
445    }
446
447    public static Builder newBuilder() { return Builder.create(); }
448    public Builder newBuilderForType() { return newBuilder(); }
449    public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto prototype) {
450      return newBuilder().mergeFrom(prototype);
451    }
452    public Builder toBuilder() { return newBuilder(this); }
453
454    @java.lang.Override
455    protected Builder newBuilderForType(
456        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
457      Builder builder = new Builder(parent);
458      return builder;
459    }
460    /**
461     * Protobuf type {@code hadoop.common.RPCTraceInfoProto}
462     *
463     * <pre>
464     **
465     * Used to pass through the information necessary to continue
466     * a trace after an RPC is made. All we need is the traceid
467     * (so we know the overarching trace this message is a part of), and
468     * the id of the current span when this message was sent, so we know
469     * what span caused the new span we will create when this message is received.
470     * </pre>
471     */
472    public static final class Builder extends
473        com.google.protobuf.GeneratedMessage.Builder<Builder>
474       implements org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProtoOrBuilder {
475      public static final com.google.protobuf.Descriptors.Descriptor
476          getDescriptor() {
477        return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RPCTraceInfoProto_descriptor;
478      }
479
480      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
481          internalGetFieldAccessorTable() {
482        return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RPCTraceInfoProto_fieldAccessorTable
483            .ensureFieldAccessorsInitialized(
484                org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto.class, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto.Builder.class);
485      }
486
487      // Construct using org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto.newBuilder()
488      private Builder() {
489        maybeForceBuilderInitialization();
490      }
491
492      private Builder(
493          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
494        super(parent);
495        maybeForceBuilderInitialization();
496      }
497      private void maybeForceBuilderInitialization() {
498        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
499        }
500      }
501      private static Builder create() {
502        return new Builder();
503      }
504
505      public Builder clear() {
506        super.clear();
507        traceId_ = 0L;
508        bitField0_ = (bitField0_ & ~0x00000001);
509        parentId_ = 0L;
510        bitField0_ = (bitField0_ & ~0x00000002);
511        return this;
512      }
513
514      public Builder clone() {
515        return create().mergeFrom(buildPartial());
516      }
517
518      public com.google.protobuf.Descriptors.Descriptor
519          getDescriptorForType() {
520        return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RPCTraceInfoProto_descriptor;
521      }
522
523      public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto getDefaultInstanceForType() {
524        return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto.getDefaultInstance();
525      }
526
527      public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto build() {
528        org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto result = buildPartial();
529        if (!result.isInitialized()) {
530          throw newUninitializedMessageException(result);
531        }
532        return result;
533      }
534
535      public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto buildPartial() {
536        org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto result = new org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto(this);
537        int from_bitField0_ = bitField0_;
538        int to_bitField0_ = 0;
539        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
540          to_bitField0_ |= 0x00000001;
541        }
542        result.traceId_ = traceId_;
543        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
544          to_bitField0_ |= 0x00000002;
545        }
546        result.parentId_ = parentId_;
547        result.bitField0_ = to_bitField0_;
548        onBuilt();
549        return result;
550      }
551
552      public Builder mergeFrom(com.google.protobuf.Message other) {
553        if (other instanceof org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto) {
554          return mergeFrom((org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto)other);
555        } else {
556          super.mergeFrom(other);
557          return this;
558        }
559      }
560
561      public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto other) {
562        if (other == org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto.getDefaultInstance()) return this;
563        if (other.hasTraceId()) {
564          setTraceId(other.getTraceId());
565        }
566        if (other.hasParentId()) {
567          setParentId(other.getParentId());
568        }
569        this.mergeUnknownFields(other.getUnknownFields());
570        return this;
571      }
572
573      public final boolean isInitialized() {
574        return true;
575      }
576
577      public Builder mergeFrom(
578          com.google.protobuf.CodedInputStream input,
579          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
580          throws java.io.IOException {
581        org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto parsedMessage = null;
582        try {
583          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
584        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
585          parsedMessage = (org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto) e.getUnfinishedMessage();
586          throw e;
587        } finally {
588          if (parsedMessage != null) {
589            mergeFrom(parsedMessage);
590          }
591        }
592        return this;
593      }
594      private int bitField0_;
595
596      // optional int64 traceId = 1;
597      private long traceId_ ;
598      /**
599       * <code>optional int64 traceId = 1;</code>
600       */
601      public boolean hasTraceId() {
602        return ((bitField0_ & 0x00000001) == 0x00000001);
603      }
604      /**
605       * <code>optional int64 traceId = 1;</code>
606       */
607      public long getTraceId() {
608        return traceId_;
609      }
610      /**
611       * <code>optional int64 traceId = 1;</code>
612       */
613      public Builder setTraceId(long value) {
614        bitField0_ |= 0x00000001;
615        traceId_ = value;
616        onChanged();
617        return this;
618      }
619      /**
620       * <code>optional int64 traceId = 1;</code>
621       */
622      public Builder clearTraceId() {
623        bitField0_ = (bitField0_ & ~0x00000001);
624        traceId_ = 0L;
625        onChanged();
626        return this;
627      }
628
629      // optional int64 parentId = 2;
630      private long parentId_ ;
631      /**
632       * <code>optional int64 parentId = 2;</code>
633       */
634      public boolean hasParentId() {
635        return ((bitField0_ & 0x00000002) == 0x00000002);
636      }
637      /**
638       * <code>optional int64 parentId = 2;</code>
639       */
640      public long getParentId() {
641        return parentId_;
642      }
643      /**
644       * <code>optional int64 parentId = 2;</code>
645       */
646      public Builder setParentId(long value) {
647        bitField0_ |= 0x00000002;
648        parentId_ = value;
649        onChanged();
650        return this;
651      }
652      /**
653       * <code>optional int64 parentId = 2;</code>
654       */
655      public Builder clearParentId() {
656        bitField0_ = (bitField0_ & ~0x00000002);
657        parentId_ = 0L;
658        onChanged();
659        return this;
660      }
661
662      // @@protoc_insertion_point(builder_scope:hadoop.common.RPCTraceInfoProto)
663    }
664
665    static {
666      defaultInstance = new RPCTraceInfoProto(true);
667      defaultInstance.initFields();
668    }
669
670    // @@protoc_insertion_point(class_scope:hadoop.common.RPCTraceInfoProto)
671  }
672
  /**
   * Read-only accessor contract implemented by both {@code RpcRequestHeaderProto}
   * and its {@code Builder}: a presence check plus a getter per field, and an
   * OrBuilder view accessor for the message-typed {@code traceInfo} field.
   */
  public interface RpcRequestHeaderProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // optional .hadoop.common.RpcKindProto rpcKind = 1;
    /**
     * <code>optional .hadoop.common.RpcKindProto rpcKind = 1;</code>
     */
    boolean hasRpcKind();
    /**
     * <code>optional .hadoop.common.RpcKindProto rpcKind = 1;</code>
     */
    org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcKindProto getRpcKind();

    // optional .hadoop.common.RpcRequestHeaderProto.OperationProto rpcOp = 2;
    /**
     * <code>optional .hadoop.common.RpcRequestHeaderProto.OperationProto rpcOp = 2;</code>
     */
    boolean hasRpcOp();
    /**
     * <code>optional .hadoop.common.RpcRequestHeaderProto.OperationProto rpcOp = 2;</code>
     */
    org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.OperationProto getRpcOp();

    // required sint32 callId = 3;
    /**
     * <code>required sint32 callId = 3;</code>
     *
     * <pre>
     * a sequence number that is sent back in response
     * </pre>
     */
    boolean hasCallId();
    /**
     * <code>required sint32 callId = 3;</code>
     *
     * <pre>
     * a sequence number that is sent back in response
     * </pre>
     */
    int getCallId();

    // required bytes clientId = 4;
    /**
     * <code>required bytes clientId = 4;</code>
     *
     * <pre>
     * Globally unique client ID
     * </pre>
     */
    boolean hasClientId();
    /**
     * <code>required bytes clientId = 4;</code>
     *
     * <pre>
     * Globally unique client ID
     * </pre>
     */
    com.google.protobuf.ByteString getClientId();

    // optional sint32 retryCount = 5 [default = -1];
    /**
     * <code>optional sint32 retryCount = 5 [default = -1];</code>
     *
     * <pre>
     * clientId + callId uniquely identifies a request
     * retry count, 1 means this is the first retry
     * </pre>
     */
    boolean hasRetryCount();
    /**
     * <code>optional sint32 retryCount = 5 [default = -1];</code>
     *
     * <pre>
     * clientId + callId uniquely identifies a request
     * retry count, 1 means this is the first retry
     * </pre>
     */
    int getRetryCount();

    // optional .hadoop.common.RPCTraceInfoProto traceInfo = 6;
    /**
     * <code>optional .hadoop.common.RPCTraceInfoProto traceInfo = 6;</code>
     *
     * <pre>
     * tracing info
     * </pre>
     */
    boolean hasTraceInfo();
    /**
     * <code>optional .hadoop.common.RPCTraceInfoProto traceInfo = 6;</code>
     *
     * <pre>
     * tracing info
     * </pre>
     */
    org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto getTraceInfo();
    /**
     * <code>optional .hadoop.common.RPCTraceInfoProto traceInfo = 6;</code>
     *
     * <pre>
     * tracing info
     * </pre>
     */
    org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProtoOrBuilder getTraceInfoOrBuilder();
  }
778  /**
779   * Protobuf type {@code hadoop.common.RpcRequestHeaderProto}
780   *
781   * <pre>
782   * the header for the RpcRequest
783   * </pre>
784   */
785  public static final class RpcRequestHeaderProto extends
786      com.google.protobuf.GeneratedMessage
787      implements RpcRequestHeaderProtoOrBuilder {
788    // Use RpcRequestHeaderProto.newBuilder() to construct.
789    private RpcRequestHeaderProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
790      super(builder);
791      this.unknownFields = builder.getUnknownFields();
792    }
793    private RpcRequestHeaderProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
794
795    private static final RpcRequestHeaderProto defaultInstance;
796    public static RpcRequestHeaderProto getDefaultInstance() {
797      return defaultInstance;
798    }
799
800    public RpcRequestHeaderProto getDefaultInstanceForType() {
801      return defaultInstance;
802    }
803
804    private final com.google.protobuf.UnknownFieldSet unknownFields;
805    @java.lang.Override
806    public final com.google.protobuf.UnknownFieldSet
807        getUnknownFields() {
808      return this.unknownFields;
809    }
810    private RpcRequestHeaderProto(
811        com.google.protobuf.CodedInputStream input,
812        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
813        throws com.google.protobuf.InvalidProtocolBufferException {
814      initFields();
815      int mutable_bitField0_ = 0;
816      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
817          com.google.protobuf.UnknownFieldSet.newBuilder();
818      try {
819        boolean done = false;
820        while (!done) {
821          int tag = input.readTag();
822          switch (tag) {
823            case 0:
824              done = true;
825              break;
826            default: {
827              if (!parseUnknownField(input, unknownFields,
828                                     extensionRegistry, tag)) {
829                done = true;
830              }
831              break;
832            }
833            case 8: {
834              int rawValue = input.readEnum();
835              org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcKindProto value = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcKindProto.valueOf(rawValue);
836              if (value == null) {
837                unknownFields.mergeVarintField(1, rawValue);
838              } else {
839                bitField0_ |= 0x00000001;
840                rpcKind_ = value;
841              }
842              break;
843            }
844            case 16: {
845              int rawValue = input.readEnum();
846              org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.OperationProto value = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.OperationProto.valueOf(rawValue);
847              if (value == null) {
848                unknownFields.mergeVarintField(2, rawValue);
849              } else {
850                bitField0_ |= 0x00000002;
851                rpcOp_ = value;
852              }
853              break;
854            }
855            case 24: {
856              bitField0_ |= 0x00000004;
857              callId_ = input.readSInt32();
858              break;
859            }
860            case 34: {
861              bitField0_ |= 0x00000008;
862              clientId_ = input.readBytes();
863              break;
864            }
865            case 40: {
866              bitField0_ |= 0x00000010;
867              retryCount_ = input.readSInt32();
868              break;
869            }
870            case 50: {
871              org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto.Builder subBuilder = null;
872              if (((bitField0_ & 0x00000020) == 0x00000020)) {
873                subBuilder = traceInfo_.toBuilder();
874              }
875              traceInfo_ = input.readMessage(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto.PARSER, extensionRegistry);
876              if (subBuilder != null) {
877                subBuilder.mergeFrom(traceInfo_);
878                traceInfo_ = subBuilder.buildPartial();
879              }
880              bitField0_ |= 0x00000020;
881              break;
882            }
883          }
884        }
885      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
886        throw e.setUnfinishedMessage(this);
887      } catch (java.io.IOException e) {
888        throw new com.google.protobuf.InvalidProtocolBufferException(
889            e.getMessage()).setUnfinishedMessage(this);
890      } finally {
891        this.unknownFields = unknownFields.build();
892        makeExtensionsImmutable();
893      }
894    }
895    public static final com.google.protobuf.Descriptors.Descriptor
896        getDescriptor() {
897      return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RpcRequestHeaderProto_descriptor;
898    }
899
900    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
901        internalGetFieldAccessorTable() {
902      return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RpcRequestHeaderProto_fieldAccessorTable
903          .ensureFieldAccessorsInitialized(
904              org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.class, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.Builder.class);
905    }
906
907    public static com.google.protobuf.Parser<RpcRequestHeaderProto> PARSER =
908        new com.google.protobuf.AbstractParser<RpcRequestHeaderProto>() {
909      public RpcRequestHeaderProto parsePartialFrom(
910          com.google.protobuf.CodedInputStream input,
911          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
912          throws com.google.protobuf.InvalidProtocolBufferException {
913        return new RpcRequestHeaderProto(input, extensionRegistry);
914      }
915    };
916
917    @java.lang.Override
918    public com.google.protobuf.Parser<RpcRequestHeaderProto> getParserForType() {
919      return PARSER;
920    }
921
922    /**
923     * Protobuf enum {@code hadoop.common.RpcRequestHeaderProto.OperationProto}
924     */
925    public enum OperationProto
926        implements com.google.protobuf.ProtocolMessageEnum {
927      /**
928       * <code>RPC_FINAL_PACKET = 0;</code>
929       *
930       * <pre>
931       * The final RPC Packet
932       * </pre>
933       */
934      RPC_FINAL_PACKET(0, 0),
935      /**
936       * <code>RPC_CONTINUATION_PACKET = 1;</code>
937       *
938       * <pre>
939       * not implemented yet
940       * </pre>
941       */
942      RPC_CONTINUATION_PACKET(1, 1),
943      /**
944       * <code>RPC_CLOSE_CONNECTION = 2;</code>
945       *
946       * <pre>
947       * close the rpc connection
948       * </pre>
949       */
950      RPC_CLOSE_CONNECTION(2, 2),
951      ;
952
953      /**
954       * <code>RPC_FINAL_PACKET = 0;</code>
955       *
956       * <pre>
957       * The final RPC Packet
958       * </pre>
959       */
960      public static final int RPC_FINAL_PACKET_VALUE = 0;
961      /**
962       * <code>RPC_CONTINUATION_PACKET = 1;</code>
963       *
964       * <pre>
965       * not implemented yet
966       * </pre>
967       */
968      public static final int RPC_CONTINUATION_PACKET_VALUE = 1;
969      /**
970       * <code>RPC_CLOSE_CONNECTION = 2;</code>
971       *
972       * <pre>
973       * close the rpc connection
974       * </pre>
975       */
976      public static final int RPC_CLOSE_CONNECTION_VALUE = 2;
977
978
979      public final int getNumber() { return value; }
980
981      public static OperationProto valueOf(int value) {
982        switch (value) {
983          case 0: return RPC_FINAL_PACKET;
984          case 1: return RPC_CONTINUATION_PACKET;
985          case 2: return RPC_CLOSE_CONNECTION;
986          default: return null;
987        }
988      }
989
990      public static com.google.protobuf.Internal.EnumLiteMap<OperationProto>
991          internalGetValueMap() {
992        return internalValueMap;
993      }
994      private static com.google.protobuf.Internal.EnumLiteMap<OperationProto>
995          internalValueMap =
996            new com.google.protobuf.Internal.EnumLiteMap<OperationProto>() {
997              public OperationProto findValueByNumber(int number) {
998                return OperationProto.valueOf(number);
999              }
1000            };
1001
1002      public final com.google.protobuf.Descriptors.EnumValueDescriptor
1003          getValueDescriptor() {
1004        return getDescriptor().getValues().get(index);
1005      }
1006      public final com.google.protobuf.Descriptors.EnumDescriptor
1007          getDescriptorForType() {
1008        return getDescriptor();
1009      }
1010      public static final com.google.protobuf.Descriptors.EnumDescriptor
1011          getDescriptor() {
1012        return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.getDescriptor().getEnumTypes().get(0);
1013      }
1014
1015      private static final OperationProto[] VALUES = values();
1016
1017      public static OperationProto valueOf(
1018          com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
1019        if (desc.getType() != getDescriptor()) {
1020          throw new java.lang.IllegalArgumentException(
1021            "EnumValueDescriptor is not for this type.");
1022        }
1023        return VALUES[desc.getIndex()];
1024      }
1025
1026      private final int index;
1027      private final int value;
1028
1029      private OperationProto(int index, int value) {
1030        this.index = index;
1031        this.value = value;
1032      }
1033
1034      // @@protoc_insertion_point(enum_scope:hadoop.common.RpcRequestHeaderProto.OperationProto)
1035    }
1036
1037    private int bitField0_;
1038    // optional .hadoop.common.RpcKindProto rpcKind = 1;
1039    public static final int RPCKIND_FIELD_NUMBER = 1;
1040    private org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcKindProto rpcKind_;
1041    /**
1042     * <code>optional .hadoop.common.RpcKindProto rpcKind = 1;</code>
1043     */
1044    public boolean hasRpcKind() {
1045      return ((bitField0_ & 0x00000001) == 0x00000001);
1046    }
1047    /**
1048     * <code>optional .hadoop.common.RpcKindProto rpcKind = 1;</code>
1049     */
1050    public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcKindProto getRpcKind() {
1051      return rpcKind_;
1052    }
1053
1054    // optional .hadoop.common.RpcRequestHeaderProto.OperationProto rpcOp = 2;
1055    public static final int RPCOP_FIELD_NUMBER = 2;
1056    private org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.OperationProto rpcOp_;
1057    /**
1058     * <code>optional .hadoop.common.RpcRequestHeaderProto.OperationProto rpcOp = 2;</code>
1059     */
1060    public boolean hasRpcOp() {
1061      return ((bitField0_ & 0x00000002) == 0x00000002);
1062    }
1063    /**
1064     * <code>optional .hadoop.common.RpcRequestHeaderProto.OperationProto rpcOp = 2;</code>
1065     */
1066    public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.OperationProto getRpcOp() {
1067      return rpcOp_;
1068    }
1069
1070    // required sint32 callId = 3;
1071    public static final int CALLID_FIELD_NUMBER = 3;
1072    private int callId_;
1073    /**
1074     * <code>required sint32 callId = 3;</code>
1075     *
1076     * <pre>
1077     * a sequence number that is sent back in response
1078     * </pre>
1079     */
1080    public boolean hasCallId() {
1081      return ((bitField0_ & 0x00000004) == 0x00000004);
1082    }
1083    /**
1084     * <code>required sint32 callId = 3;</code>
1085     *
1086     * <pre>
1087     * a sequence number that is sent back in response
1088     * </pre>
1089     */
1090    public int getCallId() {
1091      return callId_;
1092    }
1093
1094    // required bytes clientId = 4;
1095    public static final int CLIENTID_FIELD_NUMBER = 4;
1096    private com.google.protobuf.ByteString clientId_;
1097    /**
1098     * <code>required bytes clientId = 4;</code>
1099     *
1100     * <pre>
1101     * Globally unique client ID
1102     * </pre>
1103     */
1104    public boolean hasClientId() {
1105      return ((bitField0_ & 0x00000008) == 0x00000008);
1106    }
1107    /**
1108     * <code>required bytes clientId = 4;</code>
1109     *
1110     * <pre>
1111     * Globally unique client ID
1112     * </pre>
1113     */
1114    public com.google.protobuf.ByteString getClientId() {
1115      return clientId_;
1116    }
1117
1118    // optional sint32 retryCount = 5 [default = -1];
1119    public static final int RETRYCOUNT_FIELD_NUMBER = 5;
1120    private int retryCount_;
1121    /**
1122     * <code>optional sint32 retryCount = 5 [default = -1];</code>
1123     *
1124     * <pre>
1125     * clientId + callId uniquely identifies a request
1126     * retry count, 1 means this is the first retry
1127     * </pre>
1128     */
1129    public boolean hasRetryCount() {
1130      return ((bitField0_ & 0x00000010) == 0x00000010);
1131    }
1132    /**
1133     * <code>optional sint32 retryCount = 5 [default = -1];</code>
1134     *
1135     * <pre>
1136     * clientId + callId uniquely identifies a request
1137     * retry count, 1 means this is the first retry
1138     * </pre>
1139     */
1140    public int getRetryCount() {
1141      return retryCount_;
1142    }
1143
1144    // optional .hadoop.common.RPCTraceInfoProto traceInfo = 6;
1145    public static final int TRACEINFO_FIELD_NUMBER = 6;
1146    private org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto traceInfo_;
1147    /**
1148     * <code>optional .hadoop.common.RPCTraceInfoProto traceInfo = 6;</code>
1149     *
1150     * <pre>
1151     * tracing info
1152     * </pre>
1153     */
1154    public boolean hasTraceInfo() {
1155      return ((bitField0_ & 0x00000020) == 0x00000020);
1156    }
1157    /**
1158     * <code>optional .hadoop.common.RPCTraceInfoProto traceInfo = 6;</code>
1159     *
1160     * <pre>
1161     * tracing info
1162     * </pre>
1163     */
1164    public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto getTraceInfo() {
1165      return traceInfo_;
1166    }
1167    /**
1168     * <code>optional .hadoop.common.RPCTraceInfoProto traceInfo = 6;</code>
1169     *
1170     * <pre>
1171     * tracing info
1172     * </pre>
1173     */
1174    public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProtoOrBuilder getTraceInfoOrBuilder() {
1175      return traceInfo_;
1176    }
1177
1178    private void initFields() {
1179      rpcKind_ = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcKindProto.RPC_BUILTIN;
1180      rpcOp_ = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.OperationProto.RPC_FINAL_PACKET;
1181      callId_ = 0;
1182      clientId_ = com.google.protobuf.ByteString.EMPTY;
1183      retryCount_ = -1;
1184      traceInfo_ = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto.getDefaultInstance();
1185    }
1186    private byte memoizedIsInitialized = -1;
1187    public final boolean isInitialized() {
1188      byte isInitialized = memoizedIsInitialized;
1189      if (isInitialized != -1) return isInitialized == 1;
1190
1191      if (!hasCallId()) {
1192        memoizedIsInitialized = 0;
1193        return false;
1194      }
1195      if (!hasClientId()) {
1196        memoizedIsInitialized = 0;
1197        return false;
1198      }
1199      memoizedIsInitialized = 1;
1200      return true;
1201    }
1202
1203    public void writeTo(com.google.protobuf.CodedOutputStream output)
1204                        throws java.io.IOException {
1205      getSerializedSize();
1206      if (((bitField0_ & 0x00000001) == 0x00000001)) {
1207        output.writeEnum(1, rpcKind_.getNumber());
1208      }
1209      if (((bitField0_ & 0x00000002) == 0x00000002)) {
1210        output.writeEnum(2, rpcOp_.getNumber());
1211      }
1212      if (((bitField0_ & 0x00000004) == 0x00000004)) {
1213        output.writeSInt32(3, callId_);
1214      }
1215      if (((bitField0_ & 0x00000008) == 0x00000008)) {
1216        output.writeBytes(4, clientId_);
1217      }
1218      if (((bitField0_ & 0x00000010) == 0x00000010)) {
1219        output.writeSInt32(5, retryCount_);
1220      }
1221      if (((bitField0_ & 0x00000020) == 0x00000020)) {
1222        output.writeMessage(6, traceInfo_);
1223      }
1224      getUnknownFields().writeTo(output);
1225    }
1226
1227    private int memoizedSerializedSize = -1;
1228    public int getSerializedSize() {
1229      int size = memoizedSerializedSize;
1230      if (size != -1) return size;
1231
1232      size = 0;
1233      if (((bitField0_ & 0x00000001) == 0x00000001)) {
1234        size += com.google.protobuf.CodedOutputStream
1235          .computeEnumSize(1, rpcKind_.getNumber());
1236      }
1237      if (((bitField0_ & 0x00000002) == 0x00000002)) {
1238        size += com.google.protobuf.CodedOutputStream
1239          .computeEnumSize(2, rpcOp_.getNumber());
1240      }
1241      if (((bitField0_ & 0x00000004) == 0x00000004)) {
1242        size += com.google.protobuf.CodedOutputStream
1243          .computeSInt32Size(3, callId_);
1244      }
1245      if (((bitField0_ & 0x00000008) == 0x00000008)) {
1246        size += com.google.protobuf.CodedOutputStream
1247          .computeBytesSize(4, clientId_);
1248      }
1249      if (((bitField0_ & 0x00000010) == 0x00000010)) {
1250        size += com.google.protobuf.CodedOutputStream
1251          .computeSInt32Size(5, retryCount_);
1252      }
1253      if (((bitField0_ & 0x00000020) == 0x00000020)) {
1254        size += com.google.protobuf.CodedOutputStream
1255          .computeMessageSize(6, traceInfo_);
1256      }
1257      size += getUnknownFields().getSerializedSize();
1258      memoizedSerializedSize = size;
1259      return size;
1260    }
1261
1262    private static final long serialVersionUID = 0L;
1263    @java.lang.Override
1264    protected java.lang.Object writeReplace()
1265        throws java.io.ObjectStreamException {
1266      return super.writeReplace();
1267    }
1268
1269    @java.lang.Override
1270    public boolean equals(final java.lang.Object obj) {
1271      if (obj == this) {
1272       return true;
1273      }
1274      if (!(obj instanceof org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto)) {
1275        return super.equals(obj);
1276      }
1277      org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto other = (org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto) obj;
1278
1279      boolean result = true;
1280      result = result && (hasRpcKind() == other.hasRpcKind());
1281      if (hasRpcKind()) {
1282        result = result &&
1283            (getRpcKind() == other.getRpcKind());
1284      }
1285      result = result && (hasRpcOp() == other.hasRpcOp());
1286      if (hasRpcOp()) {
1287        result = result &&
1288            (getRpcOp() == other.getRpcOp());
1289      }
1290      result = result && (hasCallId() == other.hasCallId());
1291      if (hasCallId()) {
1292        result = result && (getCallId()
1293            == other.getCallId());
1294      }
1295      result = result && (hasClientId() == other.hasClientId());
1296      if (hasClientId()) {
1297        result = result && getClientId()
1298            .equals(other.getClientId());
1299      }
1300      result = result && (hasRetryCount() == other.hasRetryCount());
1301      if (hasRetryCount()) {
1302        result = result && (getRetryCount()
1303            == other.getRetryCount());
1304      }
1305      result = result && (hasTraceInfo() == other.hasTraceInfo());
1306      if (hasTraceInfo()) {
1307        result = result && getTraceInfo()
1308            .equals(other.getTraceInfo());
1309      }
1310      result = result &&
1311          getUnknownFields().equals(other.getUnknownFields());
1312      return result;
1313    }
1314
1315    private int memoizedHashCode = 0;
1316    @java.lang.Override
1317    public int hashCode() {
1318      if (memoizedHashCode != 0) {
1319        return memoizedHashCode;
1320      }
1321      int hash = 41;
1322      hash = (19 * hash) + getDescriptorForType().hashCode();
1323      if (hasRpcKind()) {
1324        hash = (37 * hash) + RPCKIND_FIELD_NUMBER;
1325        hash = (53 * hash) + hashEnum(getRpcKind());
1326      }
1327      if (hasRpcOp()) {
1328        hash = (37 * hash) + RPCOP_FIELD_NUMBER;
1329        hash = (53 * hash) + hashEnum(getRpcOp());
1330      }
1331      if (hasCallId()) {
1332        hash = (37 * hash) + CALLID_FIELD_NUMBER;
1333        hash = (53 * hash) + getCallId();
1334      }
1335      if (hasClientId()) {
1336        hash = (37 * hash) + CLIENTID_FIELD_NUMBER;
1337        hash = (53 * hash) + getClientId().hashCode();
1338      }
1339      if (hasRetryCount()) {
1340        hash = (37 * hash) + RETRYCOUNT_FIELD_NUMBER;
1341        hash = (53 * hash) + getRetryCount();
1342      }
1343      if (hasTraceInfo()) {
1344        hash = (37 * hash) + TRACEINFO_FIELD_NUMBER;
1345        hash = (53 * hash) + getTraceInfo().hashCode();
1346      }
1347      hash = (29 * hash) + getUnknownFields().hashCode();
1348      memoizedHashCode = hash;
1349      return hash;
1350    }
1351
1352    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto parseFrom(
1353        com.google.protobuf.ByteString data)
1354        throws com.google.protobuf.InvalidProtocolBufferException {
1355      return PARSER.parseFrom(data);
1356    }
1357    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto parseFrom(
1358        com.google.protobuf.ByteString data,
1359        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1360        throws com.google.protobuf.InvalidProtocolBufferException {
1361      return PARSER.parseFrom(data, extensionRegistry);
1362    }
1363    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto parseFrom(byte[] data)
1364        throws com.google.protobuf.InvalidProtocolBufferException {
1365      return PARSER.parseFrom(data);
1366    }
1367    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto parseFrom(
1368        byte[] data,
1369        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1370        throws com.google.protobuf.InvalidProtocolBufferException {
1371      return PARSER.parseFrom(data, extensionRegistry);
1372    }
1373    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto parseFrom(java.io.InputStream input)
1374        throws java.io.IOException {
1375      return PARSER.parseFrom(input);
1376    }
1377    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto parseFrom(
1378        java.io.InputStream input,
1379        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1380        throws java.io.IOException {
1381      return PARSER.parseFrom(input, extensionRegistry);
1382    }
1383    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto parseDelimitedFrom(java.io.InputStream input)
1384        throws java.io.IOException {
1385      return PARSER.parseDelimitedFrom(input);
1386    }
1387    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto parseDelimitedFrom(
1388        java.io.InputStream input,
1389        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1390        throws java.io.IOException {
1391      return PARSER.parseDelimitedFrom(input, extensionRegistry);
1392    }
1393    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto parseFrom(
1394        com.google.protobuf.CodedInputStream input)
1395        throws java.io.IOException {
1396      return PARSER.parseFrom(input);
1397    }
1398    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto parseFrom(
1399        com.google.protobuf.CodedInputStream input,
1400        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1401        throws java.io.IOException {
1402      return PARSER.parseFrom(input, extensionRegistry);
1403    }
1404
1405    public static Builder newBuilder() { return Builder.create(); }
1406    public Builder newBuilderForType() { return newBuilder(); }
1407    public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto prototype) {
1408      return newBuilder().mergeFrom(prototype);
1409    }
1410    public Builder toBuilder() { return newBuilder(this); }
1411
1412    @java.lang.Override
1413    protected Builder newBuilderForType(
1414        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
1415      Builder builder = new Builder(parent);
1416      return builder;
1417    }
1418    /**
1419     * Protobuf type {@code hadoop.common.RpcRequestHeaderProto}
1420     *
1421     * <pre>
1422     * the header for the RpcRequest
1423     * </pre>
1424     */
1425    public static final class Builder extends
1426        com.google.protobuf.GeneratedMessage.Builder<Builder>
1427       implements org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProtoOrBuilder {
1428      public static final com.google.protobuf.Descriptors.Descriptor
1429          getDescriptor() {
1430        return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RpcRequestHeaderProto_descriptor;
1431      }
1432
1433      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
1434          internalGetFieldAccessorTable() {
1435        return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RpcRequestHeaderProto_fieldAccessorTable
1436            .ensureFieldAccessorsInitialized(
1437                org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.class, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.Builder.class);
1438      }
1439
1440      // Construct using org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.newBuilder()
1441      private Builder() {
1442        maybeForceBuilderInitialization();
1443      }
1444
1445      private Builder(
1446          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
1447        super(parent);
1448        maybeForceBuilderInitialization();
1449      }
1450      private void maybeForceBuilderInitialization() {
1451        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
1452          getTraceInfoFieldBuilder();
1453        }
1454      }
1455      private static Builder create() {
1456        return new Builder();
1457      }
1458
1459      public Builder clear() {
1460        super.clear();
1461        rpcKind_ = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcKindProto.RPC_BUILTIN;
1462        bitField0_ = (bitField0_ & ~0x00000001);
1463        rpcOp_ = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.OperationProto.RPC_FINAL_PACKET;
1464        bitField0_ = (bitField0_ & ~0x00000002);
1465        callId_ = 0;
1466        bitField0_ = (bitField0_ & ~0x00000004);
1467        clientId_ = com.google.protobuf.ByteString.EMPTY;
1468        bitField0_ = (bitField0_ & ~0x00000008);
1469        retryCount_ = -1;
1470        bitField0_ = (bitField0_ & ~0x00000010);
1471        if (traceInfoBuilder_ == null) {
1472          traceInfo_ = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto.getDefaultInstance();
1473        } else {
1474          traceInfoBuilder_.clear();
1475        }
1476        bitField0_ = (bitField0_ & ~0x00000020);
1477        return this;
1478      }
1479
1480      public Builder clone() {
1481        return create().mergeFrom(buildPartial());
1482      }
1483
1484      public com.google.protobuf.Descriptors.Descriptor
1485          getDescriptorForType() {
1486        return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RpcRequestHeaderProto_descriptor;
1487      }
1488
1489      public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto getDefaultInstanceForType() {
1490        return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.getDefaultInstance();
1491      }
1492
1493      public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto build() {
1494        org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto result = buildPartial();
1495        if (!result.isInitialized()) {
1496          throw newUninitializedMessageException(result);
1497        }
1498        return result;
1499      }
1500
1501      public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto buildPartial() {
1502        org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto result = new org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto(this);
1503        int from_bitField0_ = bitField0_;
1504        int to_bitField0_ = 0;
1505        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
1506          to_bitField0_ |= 0x00000001;
1507        }
1508        result.rpcKind_ = rpcKind_;
1509        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
1510          to_bitField0_ |= 0x00000002;
1511        }
1512        result.rpcOp_ = rpcOp_;
1513        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
1514          to_bitField0_ |= 0x00000004;
1515        }
1516        result.callId_ = callId_;
1517        if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
1518          to_bitField0_ |= 0x00000008;
1519        }
1520        result.clientId_ = clientId_;
1521        if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
1522          to_bitField0_ |= 0x00000010;
1523        }
1524        result.retryCount_ = retryCount_;
1525        if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
1526          to_bitField0_ |= 0x00000020;
1527        }
1528        if (traceInfoBuilder_ == null) {
1529          result.traceInfo_ = traceInfo_;
1530        } else {
1531          result.traceInfo_ = traceInfoBuilder_.build();
1532        }
1533        result.bitField0_ = to_bitField0_;
1534        onBuilt();
1535        return result;
1536      }
1537
1538      public Builder mergeFrom(com.google.protobuf.Message other) {
1539        if (other instanceof org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto) {
1540          return mergeFrom((org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto)other);
1541        } else {
1542          super.mergeFrom(other);
1543          return this;
1544        }
1545      }
1546
1547      public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto other) {
1548        if (other == org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.getDefaultInstance()) return this;
1549        if (other.hasRpcKind()) {
1550          setRpcKind(other.getRpcKind());
1551        }
1552        if (other.hasRpcOp()) {
1553          setRpcOp(other.getRpcOp());
1554        }
1555        if (other.hasCallId()) {
1556          setCallId(other.getCallId());
1557        }
1558        if (other.hasClientId()) {
1559          setClientId(other.getClientId());
1560        }
1561        if (other.hasRetryCount()) {
1562          setRetryCount(other.getRetryCount());
1563        }
1564        if (other.hasTraceInfo()) {
1565          mergeTraceInfo(other.getTraceInfo());
1566        }
1567        this.mergeUnknownFields(other.getUnknownFields());
1568        return this;
1569      }
1570
1571      public final boolean isInitialized() {
1572        if (!hasCallId()) {
1573          
1574          return false;
1575        }
1576        if (!hasClientId()) {
1577          
1578          return false;
1579        }
1580        return true;
1581      }
1582
1583      public Builder mergeFrom(
1584          com.google.protobuf.CodedInputStream input,
1585          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1586          throws java.io.IOException {
1587        org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto parsedMessage = null;
1588        try {
1589          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
1590        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
1591          parsedMessage = (org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto) e.getUnfinishedMessage();
1592          throw e;
1593        } finally {
1594          if (parsedMessage != null) {
1595            mergeFrom(parsedMessage);
1596          }
1597        }
1598        return this;
1599      }
1600      private int bitField0_;
1601
1602      // optional .hadoop.common.RpcKindProto rpcKind = 1;
1603      private org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcKindProto rpcKind_ = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcKindProto.RPC_BUILTIN;
1604      /**
1605       * <code>optional .hadoop.common.RpcKindProto rpcKind = 1;</code>
1606       */
1607      public boolean hasRpcKind() {
1608        return ((bitField0_ & 0x00000001) == 0x00000001);
1609      }
1610      /**
1611       * <code>optional .hadoop.common.RpcKindProto rpcKind = 1;</code>
1612       */
1613      public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcKindProto getRpcKind() {
1614        return rpcKind_;
1615      }
1616      /**
1617       * <code>optional .hadoop.common.RpcKindProto rpcKind = 1;</code>
1618       */
1619      public Builder setRpcKind(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcKindProto value) {
1620        if (value == null) {
1621          throw new NullPointerException();
1622        }
1623        bitField0_ |= 0x00000001;
1624        rpcKind_ = value;
1625        onChanged();
1626        return this;
1627      }
1628      /**
1629       * <code>optional .hadoop.common.RpcKindProto rpcKind = 1;</code>
1630       */
1631      public Builder clearRpcKind() {
1632        bitField0_ = (bitField0_ & ~0x00000001);
1633        rpcKind_ = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcKindProto.RPC_BUILTIN;
1634        onChanged();
1635        return this;
1636      }
1637
1638      // optional .hadoop.common.RpcRequestHeaderProto.OperationProto rpcOp = 2;
1639      private org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.OperationProto rpcOp_ = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.OperationProto.RPC_FINAL_PACKET;
1640      /**
1641       * <code>optional .hadoop.common.RpcRequestHeaderProto.OperationProto rpcOp = 2;</code>
1642       */
1643      public boolean hasRpcOp() {
1644        return ((bitField0_ & 0x00000002) == 0x00000002);
1645      }
1646      /**
1647       * <code>optional .hadoop.common.RpcRequestHeaderProto.OperationProto rpcOp = 2;</code>
1648       */
1649      public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.OperationProto getRpcOp() {
1650        return rpcOp_;
1651      }
1652      /**
1653       * <code>optional .hadoop.common.RpcRequestHeaderProto.OperationProto rpcOp = 2;</code>
1654       */
1655      public Builder setRpcOp(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.OperationProto value) {
1656        if (value == null) {
1657          throw new NullPointerException();
1658        }
1659        bitField0_ |= 0x00000002;
1660        rpcOp_ = value;
1661        onChanged();
1662        return this;
1663      }
1664      /**
1665       * <code>optional .hadoop.common.RpcRequestHeaderProto.OperationProto rpcOp = 2;</code>
1666       */
1667      public Builder clearRpcOp() {
1668        bitField0_ = (bitField0_ & ~0x00000002);
1669        rpcOp_ = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.OperationProto.RPC_FINAL_PACKET;
1670        onChanged();
1671        return this;
1672      }
1673
1674      // required sint32 callId = 3;
1675      private int callId_ ;
1676      /**
1677       * <code>required sint32 callId = 3;</code>
1678       *
1679       * <pre>
1680       * a sequence number that is sent back in response
1681       * </pre>
1682       */
1683      public boolean hasCallId() {
1684        return ((bitField0_ & 0x00000004) == 0x00000004);
1685      }
1686      /**
1687       * <code>required sint32 callId = 3;</code>
1688       *
1689       * <pre>
1690       * a sequence number that is sent back in response
1691       * </pre>
1692       */
1693      public int getCallId() {
1694        return callId_;
1695      }
1696      /**
1697       * <code>required sint32 callId = 3;</code>
1698       *
1699       * <pre>
1700       * a sequence number that is sent back in response
1701       * </pre>
1702       */
1703      public Builder setCallId(int value) {
1704        bitField0_ |= 0x00000004;
1705        callId_ = value;
1706        onChanged();
1707        return this;
1708      }
1709      /**
1710       * <code>required sint32 callId = 3;</code>
1711       *
1712       * <pre>
1713       * a sequence number that is sent back in response
1714       * </pre>
1715       */
1716      public Builder clearCallId() {
1717        bitField0_ = (bitField0_ & ~0x00000004);
1718        callId_ = 0;
1719        onChanged();
1720        return this;
1721      }
1722
1723      // required bytes clientId = 4;
1724      private com.google.protobuf.ByteString clientId_ = com.google.protobuf.ByteString.EMPTY;
1725      /**
1726       * <code>required bytes clientId = 4;</code>
1727       *
1728       * <pre>
1729       * Globally unique client ID
1730       * </pre>
1731       */
1732      public boolean hasClientId() {
1733        return ((bitField0_ & 0x00000008) == 0x00000008);
1734      }
1735      /**
1736       * <code>required bytes clientId = 4;</code>
1737       *
1738       * <pre>
1739       * Globally unique client ID
1740       * </pre>
1741       */
1742      public com.google.protobuf.ByteString getClientId() {
1743        return clientId_;
1744      }
1745      /**
1746       * <code>required bytes clientId = 4;</code>
1747       *
1748       * <pre>
1749       * Globally unique client ID
1750       * </pre>
1751       */
1752      public Builder setClientId(com.google.protobuf.ByteString value) {
1753        if (value == null) {
1754    throw new NullPointerException();
1755  }
1756  bitField0_ |= 0x00000008;
1757        clientId_ = value;
1758        onChanged();
1759        return this;
1760      }
1761      /**
1762       * <code>required bytes clientId = 4;</code>
1763       *
1764       * <pre>
1765       * Globally unique client ID
1766       * </pre>
1767       */
1768      public Builder clearClientId() {
1769        bitField0_ = (bitField0_ & ~0x00000008);
1770        clientId_ = getDefaultInstance().getClientId();
1771        onChanged();
1772        return this;
1773      }
1774
1775      // optional sint32 retryCount = 5 [default = -1];
1776      private int retryCount_ = -1;
1777      /**
1778       * <code>optional sint32 retryCount = 5 [default = -1];</code>
1779       *
1780       * <pre>
1781       * clientId + callId uniquely identifies a request
1782       * retry count, 1 means this is the first retry
1783       * </pre>
1784       */
1785      public boolean hasRetryCount() {
1786        return ((bitField0_ & 0x00000010) == 0x00000010);
1787      }
1788      /**
1789       * <code>optional sint32 retryCount = 5 [default = -1];</code>
1790       *
1791       * <pre>
1792       * clientId + callId uniquely identifies a request
1793       * retry count, 1 means this is the first retry
1794       * </pre>
1795       */
1796      public int getRetryCount() {
1797        return retryCount_;
1798      }
1799      /**
1800       * <code>optional sint32 retryCount = 5 [default = -1];</code>
1801       *
1802       * <pre>
1803       * clientId + callId uniquely identifies a request
1804       * retry count, 1 means this is the first retry
1805       * </pre>
1806       */
1807      public Builder setRetryCount(int value) {
1808        bitField0_ |= 0x00000010;
1809        retryCount_ = value;
1810        onChanged();
1811        return this;
1812      }
1813      /**
1814       * <code>optional sint32 retryCount = 5 [default = -1];</code>
1815       *
1816       * <pre>
1817       * clientId + callId uniquely identifies a request
1818       * retry count, 1 means this is the first retry
1819       * </pre>
1820       */
1821      public Builder clearRetryCount() {
1822        bitField0_ = (bitField0_ & ~0x00000010);
1823        retryCount_ = -1;
1824        onChanged();
1825        return this;
1826      }
1827
    // optional .hadoop.common.RPCTraceInfoProto traceInfo = 6;
    // While traceInfoBuilder_ is null the value lives in traceInfo_; once a
    // nested builder is requested (getTraceInfoBuilder), ownership moves to
    // traceInfoBuilder_ and traceInfo_ is nulled out.
    private org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto traceInfo_ = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto.getDefaultInstance();
    private com.google.protobuf.SingleFieldBuilder<
        org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto.Builder, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProtoOrBuilder> traceInfoBuilder_;
    /**
     * <code>optional .hadoop.common.RPCTraceInfoProto traceInfo = 6;</code>
     *
     * <pre>
     * tracing info
     * </pre>
     */
    public boolean hasTraceInfo() {
      return ((bitField0_ & 0x00000020) == 0x00000020);
    }
    /**
     * <code>optional .hadoop.common.RPCTraceInfoProto traceInfo = 6;</code>
     *
     * <pre>
     * tracing info
     * </pre>
     */
    public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto getTraceInfo() {
      // Read from whichever side currently owns the value (see field comment).
      if (traceInfoBuilder_ == null) {
        return traceInfo_;
      } else {
        return traceInfoBuilder_.getMessage();
      }
    }
    /**
     * <code>optional .hadoop.common.RPCTraceInfoProto traceInfo = 6;</code>
     *
     * <pre>
     * tracing info
     * </pre>
     */
    public Builder setTraceInfo(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto value) {
      if (traceInfoBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        traceInfo_ = value;
        onChanged();
      } else {
        // Delegate null-check and change notification to the nested builder.
        traceInfoBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000020;
      return this;
    }
    /**
     * <code>optional .hadoop.common.RPCTraceInfoProto traceInfo = 6;</code>
     *
     * <pre>
     * tracing info
     * </pre>
     */
    public Builder setTraceInfo(
        org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto.Builder builderForValue) {
      if (traceInfoBuilder_ == null) {
        traceInfo_ = builderForValue.build();
        onChanged();
      } else {
        traceInfoBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000020;
      return this;
    }
    /**
     * <code>optional .hadoop.common.RPCTraceInfoProto traceInfo = 6;</code>
     *
     * <pre>
     * tracing info
     * </pre>
     */
    public Builder mergeTraceInfo(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto value) {
      if (traceInfoBuilder_ == null) {
        // Only merge field-by-field when a non-default value is already
        // present; otherwise adopt the incoming message wholesale.
        if (((bitField0_ & 0x00000020) == 0x00000020) &&
            traceInfo_ != org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto.getDefaultInstance()) {
          traceInfo_ =
            org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto.newBuilder(traceInfo_).mergeFrom(value).buildPartial();
        } else {
          traceInfo_ = value;
        }
        onChanged();
      } else {
        traceInfoBuilder_.mergeFrom(value);
      }
      bitField0_ |= 0x00000020;
      return this;
    }
    /**
     * <code>optional .hadoop.common.RPCTraceInfoProto traceInfo = 6;</code>
     *
     * <pre>
     * tracing info
     * </pre>
     */
    public Builder clearTraceInfo() {
      if (traceInfoBuilder_ == null) {
        traceInfo_ = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto.getDefaultInstance();
        onChanged();
      } else {
        traceInfoBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000020);
      return this;
    }
    /**
     * <code>optional .hadoop.common.RPCTraceInfoProto traceInfo = 6;</code>
     *
     * <pre>
     * tracing info
     * </pre>
     */
    public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto.Builder getTraceInfoBuilder() {
      // Requesting the builder marks the field present and forces creation of
      // the lazy SingleFieldBuilder (which takes ownership of traceInfo_).
      bitField0_ |= 0x00000020;
      onChanged();
      return getTraceInfoFieldBuilder().getBuilder();
    }
    /**
     * <code>optional .hadoop.common.RPCTraceInfoProto traceInfo = 6;</code>
     *
     * <pre>
     * tracing info
     * </pre>
     */
    public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProtoOrBuilder getTraceInfoOrBuilder() {
      if (traceInfoBuilder_ != null) {
        return traceInfoBuilder_.getMessageOrBuilder();
      } else {
        return traceInfo_;
      }
    }
    /**
     * <code>optional .hadoop.common.RPCTraceInfoProto traceInfo = 6;</code>
     *
     * <pre>
     * tracing info
     * </pre>
     */
    private com.google.protobuf.SingleFieldBuilder<
        org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto.Builder, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProtoOrBuilder> 
        getTraceInfoFieldBuilder() {
      // Lazily create the nested builder, seeding it with the current message
      // value; from then on traceInfo_ is unused (set to null).
      if (traceInfoBuilder_ == null) {
        traceInfoBuilder_ = new com.google.protobuf.SingleFieldBuilder<
            org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto.Builder, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProtoOrBuilder>(
                traceInfo_,
                getParentForChildren(),
                isClean());
        traceInfo_ = null;
      }
      return traceInfoBuilder_;
    }
1980
1981      // @@protoc_insertion_point(builder_scope:hadoop.common.RpcRequestHeaderProto)
1982    }
1983
  static {
    // Build the shared immutable default instance once at class-load time.
    // The noInit constructor skips normal initialization, so proto field
    // defaults are populated explicitly via initFields().
    defaultInstance = new RpcRequestHeaderProto(true);
    defaultInstance.initFields();
  }
1988
1989    // @@protoc_insertion_point(class_scope:hadoop.common.RpcRequestHeaderProto)
1990  }
1991
/**
 * Accessor interface for {@code hadoop.common.RpcResponseHeaderProto},
 * implemented by both the immutable message and its builder.
 */
public interface RpcResponseHeaderProtoOrBuilder
    extends com.google.protobuf.MessageOrBuilder {

  // required uint32 callId = 1;
  /**
   * <code>required uint32 callId = 1;</code>
   *
   * <pre>
   * callId used in Request
   * </pre>
   */
  boolean hasCallId();
  /**
   * <code>required uint32 callId = 1;</code>
   *
   * <pre>
   * callId used in Request
   * </pre>
   */
  int getCallId();

  // required .hadoop.common.RpcResponseHeaderProto.RpcStatusProto status = 2;
  /**
   * <code>required .hadoop.common.RpcResponseHeaderProto.RpcStatusProto status = 2;</code>
   */
  boolean hasStatus();
  /**
   * <code>required .hadoop.common.RpcResponseHeaderProto.RpcStatusProto status = 2;</code>
   */
  org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcStatusProto getStatus();

  // optional uint32 serverIpcVersionNum = 3;
  /**
   * <code>optional uint32 serverIpcVersionNum = 3;</code>
   *
   * <pre>
   * Sent if success or fail
   * </pre>
   */
  boolean hasServerIpcVersionNum();
  /**
   * <code>optional uint32 serverIpcVersionNum = 3;</code>
   *
   * <pre>
   * Sent if success or fail
   * </pre>
   */
  int getServerIpcVersionNum();

  // optional string exceptionClassName = 4;
  /**
   * <code>optional string exceptionClassName = 4;</code>
   *
   * <pre>
   * if request fails
   * </pre>
   */
  boolean hasExceptionClassName();
  /**
   * <code>optional string exceptionClassName = 4;</code>
   *
   * <pre>
   * if request fails
   * </pre>
   */
  java.lang.String getExceptionClassName();
  /**
   * <code>optional string exceptionClassName = 4;</code>
   *
   * <pre>
   * if request fails
   * </pre>
   */
  com.google.protobuf.ByteString
      getExceptionClassNameBytes();

  // optional string errorMsg = 5;
  /**
   * <code>optional string errorMsg = 5;</code>
   *
   * <pre>
   * if request fails, often contains stack trace
   * </pre>
   */
  boolean hasErrorMsg();
  /**
   * <code>optional string errorMsg = 5;</code>
   *
   * <pre>
   * if request fails, often contains stack trace
   * </pre>
   */
  java.lang.String getErrorMsg();
  /**
   * <code>optional string errorMsg = 5;</code>
   *
   * <pre>
   * if request fails, often contains stack trace
   * </pre>
   */
  com.google.protobuf.ByteString
      getErrorMsgBytes();

  // optional .hadoop.common.RpcResponseHeaderProto.RpcErrorCodeProto errorDetail = 6;
  /**
   * <code>optional .hadoop.common.RpcResponseHeaderProto.RpcErrorCodeProto errorDetail = 6;</code>
   *
   * <pre>
   * in case of error
   * </pre>
   */
  boolean hasErrorDetail();
  /**
   * <code>optional .hadoop.common.RpcResponseHeaderProto.RpcErrorCodeProto errorDetail = 6;</code>
   *
   * <pre>
   * in case of error
   * </pre>
   */
  org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcErrorCodeProto getErrorDetail();

  // optional bytes clientId = 7;
  /**
   * <code>optional bytes clientId = 7;</code>
   *
   * <pre>
   * Globally unique client ID
   * </pre>
   */
  boolean hasClientId();
  /**
   * <code>optional bytes clientId = 7;</code>
   *
   * <pre>
   * Globally unique client ID
   * </pre>
   */
  com.google.protobuf.ByteString getClientId();

  // optional sint32 retryCount = 8 [default = -1];
  /**
   * <code>optional sint32 retryCount = 8 [default = -1];</code>
   */
  boolean hasRetryCount();
  /**
   * <code>optional sint32 retryCount = 8 [default = -1];</code>
   */
  int getRetryCount();
}
2141  /**
2142   * Protobuf type {@code hadoop.common.RpcResponseHeaderProto}
2143   *
2144   * <pre>
2145   **
2146   * Rpc Response Header
2147   * +------------------------------------------------------------------+
2148   * | Rpc total response length in bytes (4 bytes int)                 |
2149   * |  (sum of next two parts)                                         |
2150   * +------------------------------------------------------------------+
2151   * | RpcResponseHeaderProto - serialized delimited ie has len         |
2152   * +------------------------------------------------------------------+
2153   * | if request is successful:                                        |
2154   * |   - RpcResponse -  The actual rpc response  bytes follow         |
2155   * |     the response header                                          |
2156   * |     This response is serialized based on RpcKindProto            |
2157   * | if request fails :                                               |
2158   * |   The rpc response header contains the necessary info            |
2159   * +------------------------------------------------------------------+
2160   *
 * Note that the rpc response header is also used when connection setup fails,
 * i.e. the response looks like an rpc response with a fake callId.
2163   * </pre>
2164   */
2165  public static final class RpcResponseHeaderProto extends
2166      com.google.protobuf.GeneratedMessage
2167      implements RpcResponseHeaderProtoOrBuilder {
  // Use RpcResponseHeaderProto.newBuilder() to construct.
  // Builder-backed constructor: copies unknown fields from the builder.
  private RpcResponseHeaderProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
    super(builder);
    this.unknownFields = builder.getUnknownFields();
  }
  // noInit constructor used only for the singleton default instance.
  private RpcResponseHeaderProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
2174
  // Singleton default message; assigned in the class's static initializer.
  private static final RpcResponseHeaderProto defaultInstance;
  /** Returns the shared immutable default instance of this message type. */
  public static RpcResponseHeaderProto getDefaultInstance() {
    return defaultInstance;
  }

  /** Instance-level accessor for the same shared default instance. */
  public RpcResponseHeaderProto getDefaultInstanceForType() {
    return defaultInstance;
  }
2183
  // Fields present on the wire that this generated class does not recognize;
  // preserved so they round-trip on re-serialization.
  private final com.google.protobuf.UnknownFieldSet unknownFields;
  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet
      getUnknownFields() {
    return this.unknownFields;
  }
  // Parsing constructor: decodes one message from the wire. Each case label
  // is a full tag value, i.e. (field_number << 3) | wire_type.
  private RpcResponseHeaderProto(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    initFields();
    int mutable_bitField0_ = 0;
    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            // Tag 0 signals end of stream / enclosing message.
            done = true;
            break;
          // NOTE: the default arm appearing before the numbered cases is
          // unusual but legal Java; switch dispatch is by value, not order.
          default: {
            // Unrecognized tag: preserve it in unknownFields. A false return
            // means an end-group tag, which also terminates parsing.
            if (!parseUnknownField(input, unknownFields,
                                   extensionRegistry, tag)) {
              done = true;
            }
            break;
          }
          case 8: {
            // callId = 1 (uint32)
            bitField0_ |= 0x00000001;
            callId_ = input.readUInt32();
            break;
          }
          case 16: {
            // status = 2 (enum); unknown enum numbers are kept as varints
            // in unknownFields rather than dropped.
            int rawValue = input.readEnum();
            org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcStatusProto value = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcStatusProto.valueOf(rawValue);
            if (value == null) {
              unknownFields.mergeVarintField(2, rawValue);
            } else {
              bitField0_ |= 0x00000002;
              status_ = value;
            }
            break;
          }
          case 24: {
            // serverIpcVersionNum = 3 (uint32)
            bitField0_ |= 0x00000004;
            serverIpcVersionNum_ = input.readUInt32();
            break;
          }
          case 34: {
            // exceptionClassName = 4 (string, stored as raw bytes)
            bitField0_ |= 0x00000008;
            exceptionClassName_ = input.readBytes();
            break;
          }
          case 42: {
            // errorMsg = 5 (string, stored as raw bytes)
            bitField0_ |= 0x00000010;
            errorMsg_ = input.readBytes();
            break;
          }
          case 48: {
            // errorDetail = 6 (enum); same unknown-value handling as status.
            int rawValue = input.readEnum();
            org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcErrorCodeProto value = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcErrorCodeProto.valueOf(rawValue);
            if (value == null) {
              unknownFields.mergeVarintField(6, rawValue);
            } else {
              bitField0_ |= 0x00000020;
              errorDetail_ = value;
            }
            break;
          }
          case 58: {
            // clientId = 7 (bytes)
            bitField0_ |= 0x00000040;
            clientId_ = input.readBytes();
            break;
          }
          case 64: {
            // retryCount = 8 (sint32, zig-zag encoded)
            bitField0_ |= 0x00000080;
            retryCount_ = input.readSInt32();
            break;
          }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(
          e.getMessage()).setUnfinishedMessage(this);
    } finally {
      // Always freeze whatever was parsed, even on error, so the partially
      // built message attached to the exception is immutable.
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }
  /** Returns the protobuf descriptor for this message type. */
  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RpcResponseHeaderProto_descriptor;
  }

  // Binds the descriptor's fields to this class's reflection accessors.
  protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RpcResponseHeaderProto_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.class, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.Builder.class);
  }
2288
  // Stateless parser delegating to the parsing constructor. NOTE(review):
  // the generated pattern exposes this as a public non-final field; do not
  // reassign it.
  public static com.google.protobuf.Parser<RpcResponseHeaderProto> PARSER =
      new com.google.protobuf.AbstractParser<RpcResponseHeaderProto>() {
    public RpcResponseHeaderProto parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return new RpcResponseHeaderProto(input, extensionRegistry);
    }
  };

  /** Returns the shared parser for this message type. */
  @java.lang.Override
  public com.google.protobuf.Parser<RpcResponseHeaderProto> getParserForType() {
    return PARSER;
  }
2303
2304    /**
2305     * Protobuf enum {@code hadoop.common.RpcResponseHeaderProto.RpcStatusProto}
2306     */
2307    public enum RpcStatusProto
2308        implements com.google.protobuf.ProtocolMessageEnum {
2309      /**
2310       * <code>SUCCESS = 0;</code>
2311       *
2312       * <pre>
2313       * RPC succeeded
2314       * </pre>
2315       */
2316      SUCCESS(0, 0),
2317      /**
2318       * <code>ERROR = 1;</code>
2319       *
2320       * <pre>
2321       * RPC or error - connection left open for future calls
2322       * </pre>
2323       */
2324      ERROR(1, 1),
2325      /**
2326       * <code>FATAL = 2;</code>
2327       *
2328       * <pre>
2329       * Fatal error - connection closed
2330       * </pre>
2331       */
2332      FATAL(2, 2),
2333      ;
2334
2335      /**
2336       * <code>SUCCESS = 0;</code>
2337       *
2338       * <pre>
2339       * RPC succeeded
2340       * </pre>
2341       */
2342      public static final int SUCCESS_VALUE = 0;
2343      /**
2344       * <code>ERROR = 1;</code>
2345       *
2346       * <pre>
2347       * RPC or error - connection left open for future calls
2348       * </pre>
2349       */
2350      public static final int ERROR_VALUE = 1;
2351      /**
2352       * <code>FATAL = 2;</code>
2353       *
2354       * <pre>
2355       * Fatal error - connection closed
2356       * </pre>
2357       */
2358      public static final int FATAL_VALUE = 2;
2359
2360
2361      public final int getNumber() { return value; }
2362
2363      public static RpcStatusProto valueOf(int value) {
2364        switch (value) {
2365          case 0: return SUCCESS;
2366          case 1: return ERROR;
2367          case 2: return FATAL;
2368          default: return null;
2369        }
2370      }
2371
2372      public static com.google.protobuf.Internal.EnumLiteMap<RpcStatusProto>
2373          internalGetValueMap() {
2374        return internalValueMap;
2375      }
2376      private static com.google.protobuf.Internal.EnumLiteMap<RpcStatusProto>
2377          internalValueMap =
2378            new com.google.protobuf.Internal.EnumLiteMap<RpcStatusProto>() {
2379              public RpcStatusProto findValueByNumber(int number) {
2380                return RpcStatusProto.valueOf(number);
2381              }
2382            };
2383
2384      public final com.google.protobuf.Descriptors.EnumValueDescriptor
2385          getValueDescriptor() {
2386        return getDescriptor().getValues().get(index);
2387      }
2388      public final com.google.protobuf.Descriptors.EnumDescriptor
2389          getDescriptorForType() {
2390        return getDescriptor();
2391      }
2392      public static final com.google.protobuf.Descriptors.EnumDescriptor
2393          getDescriptor() {
2394        return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.getDescriptor().getEnumTypes().get(0);
2395      }
2396
2397      private static final RpcStatusProto[] VALUES = values();
2398
2399      public static RpcStatusProto valueOf(
2400          com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
2401        if (desc.getType() != getDescriptor()) {
2402          throw new java.lang.IllegalArgumentException(
2403            "EnumValueDescriptor is not for this type.");
2404        }
2405        return VALUES[desc.getIndex()];
2406      }
2407
2408      private final int index;
2409      private final int value;
2410
2411      private RpcStatusProto(int index, int value) {
2412        this.index = index;
2413        this.value = value;
2414      }
2415
2416      // @@protoc_insertion_point(enum_scope:hadoop.common.RpcResponseHeaderProto.RpcStatusProto)
2417    }
2418
2419    /**
2420     * Protobuf enum {@code hadoop.common.RpcResponseHeaderProto.RpcErrorCodeProto}
2421     */
2422    public enum RpcErrorCodeProto
2423        implements com.google.protobuf.ProtocolMessageEnum {
2424      /**
2425       * <code>ERROR_APPLICATION = 1;</code>
2426       *
2427       * <pre>
2428       * Non-fatal Rpc error - connection left open for future rpc calls
2429       * </pre>
2430       */
2431      ERROR_APPLICATION(0, 1),
2432      /**
2433       * <code>ERROR_NO_SUCH_METHOD = 2;</code>
2434       *
2435       * <pre>
2436       * Rpc error - no such method
2437       * </pre>
2438       */
2439      ERROR_NO_SUCH_METHOD(1, 2),
2440      /**
2441       * <code>ERROR_NO_SUCH_PROTOCOL = 3;</code>
2442       *
2443       * <pre>
2444       * Rpc error - no such protocol
2445       * </pre>
2446       */
2447      ERROR_NO_SUCH_PROTOCOL(2, 3),
2448      /**
2449       * <code>ERROR_RPC_SERVER = 4;</code>
2450       *
2451       * <pre>
2452       * Rpc error on server side
2453       * </pre>
2454       */
2455      ERROR_RPC_SERVER(3, 4),
2456      /**
2457       * <code>ERROR_SERIALIZING_RESPONSE = 5;</code>
2458       *
2459       * <pre>
     * error serializing response
2461       * </pre>
2462       */
2463      ERROR_SERIALIZING_RESPONSE(4, 5),
2464      /**
2465       * <code>ERROR_RPC_VERSION_MISMATCH = 6;</code>
2466       *
2467       * <pre>
2468       * Rpc protocol version mismatch
2469       * </pre>
2470       */
2471      ERROR_RPC_VERSION_MISMATCH(5, 6),
2472      /**
2473       * <code>FATAL_UNKNOWN = 10;</code>
2474       *
2475       * <pre>
2476       * Fatal Server side Rpc error - connection closed
2477       * </pre>
2478       */
2479      FATAL_UNKNOWN(6, 10),
2480      /**
2481       * <code>FATAL_UNSUPPORTED_SERIALIZATION = 11;</code>
2482       *
2483       * <pre>
     * IPC layer serialization type invalid
2485       * </pre>
2486       */
2487      FATAL_UNSUPPORTED_SERIALIZATION(7, 11),
2488      /**
2489       * <code>FATAL_INVALID_RPC_HEADER = 12;</code>
2490       *
2491       * <pre>
2492       * fields of RpcHeader are invalid
2493       * </pre>
2494       */
2495      FATAL_INVALID_RPC_HEADER(8, 12),
2496      /**
2497       * <code>FATAL_DESERIALIZING_REQUEST = 13;</code>
2498       *
2499       * <pre>
     * could not deserialize rpc request
2501       * </pre>
2502       */
2503      FATAL_DESERIALIZING_REQUEST(9, 13),
2504      /**
2505       * <code>FATAL_VERSION_MISMATCH = 14;</code>
2506       *
2507       * <pre>
2508       * Ipc Layer version mismatch
2509       * </pre>
2510       */
2511      FATAL_VERSION_MISMATCH(10, 14),
2512      /**
2513       * <code>FATAL_UNAUTHORIZED = 15;</code>
2514       *
2515       * <pre>
2516       * Auth failed
2517       * </pre>
2518       */
2519      FATAL_UNAUTHORIZED(11, 15),
2520      ;
2521
2522      /**
2523       * <code>ERROR_APPLICATION = 1;</code>
2524       *
2525       * <pre>
2526       * Non-fatal Rpc error - connection left open for future rpc calls
2527       * </pre>
2528       */
2529      public static final int ERROR_APPLICATION_VALUE = 1;
2530      /**
2531       * <code>ERROR_NO_SUCH_METHOD = 2;</code>
2532       *
2533       * <pre>
2534       * Rpc error - no such method
2535       * </pre>
2536       */
2537      public static final int ERROR_NO_SUCH_METHOD_VALUE = 2;
2538      /**
2539       * <code>ERROR_NO_SUCH_PROTOCOL = 3;</code>
2540       *
2541       * <pre>
2542       * Rpc error - no such protocol
2543       * </pre>
2544       */
2545      public static final int ERROR_NO_SUCH_PROTOCOL_VALUE = 3;
2546      /**
2547       * <code>ERROR_RPC_SERVER = 4;</code>
2548       *
2549       * <pre>
2550       * Rpc error on server side
2551       * </pre>
2552       */
2553      public static final int ERROR_RPC_SERVER_VALUE = 4;
2554      /**
2555       * <code>ERROR_SERIALIZING_RESPONSE = 5;</code>
2556       *
2557       * <pre>
     * error serializing response
2559       * </pre>
2560       */
2561      public static final int ERROR_SERIALIZING_RESPONSE_VALUE = 5;
2562      /**
2563       * <code>ERROR_RPC_VERSION_MISMATCH = 6;</code>
2564       *
2565       * <pre>
2566       * Rpc protocol version mismatch
2567       * </pre>
2568       */
2569      public static final int ERROR_RPC_VERSION_MISMATCH_VALUE = 6;
2570      /**
2571       * <code>FATAL_UNKNOWN = 10;</code>
2572       *
2573       * <pre>
2574       * Fatal Server side Rpc error - connection closed
2575       * </pre>
2576       */
2577      public static final int FATAL_UNKNOWN_VALUE = 10;
2578      /**
2579       * <code>FATAL_UNSUPPORTED_SERIALIZATION = 11;</code>
2580       *
2581       * <pre>
     * IPC layer serialization type invalid
2583       * </pre>
2584       */
2585      public static final int FATAL_UNSUPPORTED_SERIALIZATION_VALUE = 11;
2586      /**
2587       * <code>FATAL_INVALID_RPC_HEADER = 12;</code>
2588       *
2589       * <pre>
2590       * fields of RpcHeader are invalid
2591       * </pre>
2592       */
2593      public static final int FATAL_INVALID_RPC_HEADER_VALUE = 12;
2594      /**
2595       * <code>FATAL_DESERIALIZING_REQUEST = 13;</code>
2596       *
2597       * <pre>
     * could not deserialize rpc request
2599       * </pre>
2600       */
2601      public static final int FATAL_DESERIALIZING_REQUEST_VALUE = 13;
2602      /**
2603       * <code>FATAL_VERSION_MISMATCH = 14;</code>
2604       *
2605       * <pre>
2606       * Ipc Layer version mismatch
2607       * </pre>
2608       */
2609      public static final int FATAL_VERSION_MISMATCH_VALUE = 14;
2610      /**
2611       * <code>FATAL_UNAUTHORIZED = 15;</code>
2612       *
2613       * <pre>
2614       * Auth failed
2615       * </pre>
2616       */
2617      public static final int FATAL_UNAUTHORIZED_VALUE = 15;
2618
2619
      /** Returns the numeric wire value of this error code, as declared in RpcHeader.proto. */
      public final int getNumber() { return value; }
2621
2622      public static RpcErrorCodeProto valueOf(int value) {
2623        switch (value) {
2624          case 1: return ERROR_APPLICATION;
2625          case 2: return ERROR_NO_SUCH_METHOD;
2626          case 3: return ERROR_NO_SUCH_PROTOCOL;
2627          case 4: return ERROR_RPC_SERVER;
2628          case 5: return ERROR_SERIALIZING_RESPONSE;
2629          case 6: return ERROR_RPC_VERSION_MISMATCH;
2630          case 10: return FATAL_UNKNOWN;
2631          case 11: return FATAL_UNSUPPORTED_SERIALIZATION;
2632          case 12: return FATAL_INVALID_RPC_HEADER;
2633          case 13: return FATAL_DESERIALIZING_REQUEST;
2634          case 14: return FATAL_VERSION_MISMATCH;
2635          case 15: return FATAL_UNAUTHORIZED;
2636          default: return null;
2637        }
2638      }
2639
      /** Lookup table used by the protobuf lite runtime to map numbers to constants. */
      public static com.google.protobuf.Internal.EnumLiteMap<RpcErrorCodeProto>
          internalGetValueMap() {
        return internalValueMap;
      }
      // Delegates straight to valueOf(int); returns null for unknown numbers.
      private static com.google.protobuf.Internal.EnumLiteMap<RpcErrorCodeProto>
          internalValueMap =
            new com.google.protobuf.Internal.EnumLiteMap<RpcErrorCodeProto>() {
              public RpcErrorCodeProto findValueByNumber(int number) {
                return RpcErrorCodeProto.valueOf(number);
              }
            };

      /** Descriptor of this constant, resolved by its declaration index. */
      public final com.google.protobuf.Descriptors.EnumValueDescriptor
          getValueDescriptor() {
        return getDescriptor().getValues().get(index);
      }
      public final com.google.protobuf.Descriptors.EnumDescriptor
          getDescriptorForType() {
        return getDescriptor();
      }
      // This enum is the second (index 1) enum type nested in RpcResponseHeaderProto.
      public static final com.google.protobuf.Descriptors.EnumDescriptor
          getDescriptor() {
        return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.getDescriptor().getEnumTypes().get(1);
      }
2664
      // All constants in declaration order; cached so descriptor lookups avoid
      // cloning the array returned by values().
      private static final RpcErrorCodeProto[] VALUES = values();

      /**
       * Returns the enum constant for the given value descriptor.
       *
       * @throws java.lang.IllegalArgumentException if the descriptor belongs to
       *     a different enum type
       */
      public static RpcErrorCodeProto valueOf(
          com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
        if (desc.getType() != getDescriptor()) {
          throw new java.lang.IllegalArgumentException(
            "EnumValueDescriptor is not for this type.");
        }
        return VALUES[desc.getIndex()];
      }

      // Position of the constant in the .proto declaration (descriptor index).
      private final int index;
      // Numeric wire value of the constant.
      private final int value;

      private RpcErrorCodeProto(int index, int value) {
        this.index = index;
        this.value = value;
      }

      // @@protoc_insertion_point(enum_scope:hadoop.common.RpcResponseHeaderProto.RpcErrorCodeProto)
    }
2686
    // Presence bitmask: bit i corresponds to the i-th declared field below.
    private int bitField0_;
    // required uint32 callId = 1;
    public static final int CALLID_FIELD_NUMBER = 1;
    private int callId_;
    /**
     * <code>required uint32 callId = 1;</code>
     *
     * <pre>
     * callId used in Request
     * </pre>
     */
    public boolean hasCallId() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required uint32 callId = 1;</code>
     *
     * <pre>
     * callId used in Request
     * </pre>
     */
    public int getCallId() {
      return callId_;
    }

    // required .hadoop.common.RpcResponseHeaderProto.RpcStatusProto status = 2;
    public static final int STATUS_FIELD_NUMBER = 2;
    private org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcStatusProto status_;
    /**
     * <code>required .hadoop.common.RpcResponseHeaderProto.RpcStatusProto status = 2;</code>
     */
    public boolean hasStatus() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>required .hadoop.common.RpcResponseHeaderProto.RpcStatusProto status = 2;</code>
     */
    public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcStatusProto getStatus() {
      return status_;
    }

    // optional uint32 serverIpcVersionNum = 3;
    public static final int SERVERIPCVERSIONNUM_FIELD_NUMBER = 3;
    private int serverIpcVersionNum_;
    /**
     * <code>optional uint32 serverIpcVersionNum = 3;</code>
     *
     * <pre>
     * Sent if success or fail
     * </pre>
     */
    public boolean hasServerIpcVersionNum() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    /**
     * <code>optional uint32 serverIpcVersionNum = 3;</code>
     *
     * <pre>
     * Sent if success or fail
     * </pre>
     */
    public int getServerIpcVersionNum() {
      return serverIpcVersionNum_;
    }
2751
    // optional string exceptionClassName = 4;
    public static final int EXCEPTIONCLASSNAME_FIELD_NUMBER = 4;
    // Holds either a String or a ByteString; decoded lazily on first String access.
    private java.lang.Object exceptionClassName_;
    /**
     * <code>optional string exceptionClassName = 4;</code>
     *
     * <pre>
     * if request fails
     * </pre>
     */
    public boolean hasExceptionClassName() {
      return ((bitField0_ & 0x00000008) == 0x00000008);
    }
    /**
     * <code>optional string exceptionClassName = 4;</code>
     *
     * <pre>
     * if request fails
     * </pre>
     */
    public java.lang.String getExceptionClassName() {
      java.lang.Object ref = exceptionClassName_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        // Cache the decoded String only when the bytes are valid UTF-8, so a
        // later getExceptionClassNameBytes() can still return the exact bytes.
        if (bs.isValidUtf8()) {
          exceptionClassName_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string exceptionClassName = 4;</code>
     *
     * <pre>
     * if request fails
     * </pre>
     */
    public com.google.protobuf.ByteString
        getExceptionClassNameBytes() {
      java.lang.Object ref = exceptionClassName_;
      if (ref instanceof java.lang.String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        // Cache the encoded form for subsequent serializations.
        exceptionClassName_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    // optional string errorMsg = 5;
    public static final int ERRORMSG_FIELD_NUMBER = 5;
    // Holds either a String or a ByteString; decoded lazily on first String access.
    private java.lang.Object errorMsg_;
    /**
     * <code>optional string errorMsg = 5;</code>
     *
     * <pre>
     * if request fails, often contains stack trace
     * </pre>
     */
    public boolean hasErrorMsg() {
      return ((bitField0_ & 0x00000010) == 0x00000010);
    }
    /**
     * <code>optional string errorMsg = 5;</code>
     *
     * <pre>
     * if request fails, often contains stack trace
     * </pre>
     */
    public java.lang.String getErrorMsg() {
      java.lang.Object ref = errorMsg_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        // Cache only if valid UTF-8 (same rationale as exceptionClassName).
        if (bs.isValidUtf8()) {
          errorMsg_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string errorMsg = 5;</code>
     *
     * <pre>
     * if request fails, often contains stack trace
     * </pre>
     */
    public com.google.protobuf.ByteString
        getErrorMsgBytes() {
      java.lang.Object ref = errorMsg_;
      if (ref instanceof java.lang.String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        errorMsg_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
2861
    // optional .hadoop.common.RpcResponseHeaderProto.RpcErrorCodeProto errorDetail = 6;
    public static final int ERRORDETAIL_FIELD_NUMBER = 6;
    private org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcErrorCodeProto errorDetail_;
    /**
     * <code>optional .hadoop.common.RpcResponseHeaderProto.RpcErrorCodeProto errorDetail = 6;</code>
     *
     * <pre>
     * in case of error
     * </pre>
     */
    public boolean hasErrorDetail() {
      return ((bitField0_ & 0x00000020) == 0x00000020);
    }
    /**
     * <code>optional .hadoop.common.RpcResponseHeaderProto.RpcErrorCodeProto errorDetail = 6;</code>
     *
     * <pre>
     * in case of error
     * </pre>
     */
    public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcErrorCodeProto getErrorDetail() {
      return errorDetail_;
    }

    // optional bytes clientId = 7;
    public static final int CLIENTID_FIELD_NUMBER = 7;
    private com.google.protobuf.ByteString clientId_;
    /**
     * <code>optional bytes clientId = 7;</code>
     *
     * <pre>
     * Globally unique client ID
     * </pre>
     */
    public boolean hasClientId() {
      return ((bitField0_ & 0x00000040) == 0x00000040);
    }
    /**
     * <code>optional bytes clientId = 7;</code>
     *
     * <pre>
     * Globally unique client ID
     * </pre>
     */
    public com.google.protobuf.ByteString getClientId() {
      return clientId_;
    }

    // optional sint32 retryCount = 8 [default = -1];
    public static final int RETRYCOUNT_FIELD_NUMBER = 8;
    private int retryCount_;
    /**
     * <code>optional sint32 retryCount = 8 [default = -1];</code>
     */
    public boolean hasRetryCount() {
      return ((bitField0_ & 0x00000080) == 0x00000080);
    }
    /**
     * <code>optional sint32 retryCount = 8 [default = -1];</code>
     */
    public int getRetryCount() {
      return retryCount_;
    }
2925
    /**
     * Resets every field to its .proto-declared default.  Note retryCount's
     * explicit default of -1 (declared as {@code [default = -1]}).
     */
    private void initFields() {
      callId_ = 0;
      status_ = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcStatusProto.SUCCESS;
      serverIpcVersionNum_ = 0;
      exceptionClassName_ = "";
      errorMsg_ = "";
      errorDetail_ = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcErrorCodeProto.ERROR_APPLICATION;
      clientId_ = com.google.protobuf.ByteString.EMPTY;
      retryCount_ = -1;
    }
2936    private byte memoizedIsInitialized = -1;
2937    public final boolean isInitialized() {
2938      byte isInitialized = memoizedIsInitialized;
2939      if (isInitialized != -1) return isInitialized == 1;
2940
2941      if (!hasCallId()) {
2942        memoizedIsInitialized = 0;
2943        return false;
2944      }
2945      if (!hasStatus()) {
2946        memoizedIsInitialized = 0;
2947        return false;
2948      }
2949      memoizedIsInitialized = 1;
2950      return true;
2951    }
2952
    /**
     * Serializes the set fields to {@code output} in ascending field-number
     * order, followed by any unknown fields.  getSerializedSize() is called
     * first so the memoized size is populated before writing.
     */
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeUInt32(1, callId_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeEnum(2, status_.getNumber());
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeUInt32(3, serverIpcVersionNum_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        output.writeBytes(4, getExceptionClassNameBytes());
      }
      if (((bitField0_ & 0x00000010) == 0x00000010)) {
        output.writeBytes(5, getErrorMsgBytes());
      }
      if (((bitField0_ & 0x00000020) == 0x00000020)) {
        output.writeEnum(6, errorDetail_.getNumber());
      }
      if (((bitField0_ & 0x00000040) == 0x00000040)) {
        output.writeBytes(7, clientId_);
      }
      if (((bitField0_ & 0x00000080) == 0x00000080)) {
        // sint32 uses zigzag encoding, keeping the -1 default compact.
        output.writeSInt32(8, retryCount_);
      }
      getUnknownFields().writeTo(output);
    }
2982
    // Cached wire size; -1 means not yet computed.
    private int memoizedSerializedSize = -1;
    /**
     * Computes (once) and returns the serialized size in bytes, counting only
     * fields whose presence bits are set plus any unknown fields.
     */
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt32Size(1, callId_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeEnumSize(2, status_.getNumber());
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt32Size(3, serverIpcVersionNum_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(4, getExceptionClassNameBytes());
      }
      if (((bitField0_ & 0x00000010) == 0x00000010)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(5, getErrorMsgBytes());
      }
      if (((bitField0_ & 0x00000020) == 0x00000020)) {
        size += com.google.protobuf.CodedOutputStream
          .computeEnumSize(6, errorDetail_.getNumber());
      }
      if (((bitField0_ & 0x00000040) == 0x00000040)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(7, clientId_);
      }
      if (((bitField0_ & 0x00000080) == 0x00000080)) {
        size += com.google.protobuf.CodedOutputStream
          .computeSInt32Size(8, retryCount_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
3025
    private static final long serialVersionUID = 0L;
    /** Java serialization hook; delegates the replacement object to the superclass. */
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
3032
3033    @java.lang.Override
3034    public boolean equals(final java.lang.Object obj) {
3035      if (obj == this) {
3036       return true;
3037      }
3038      if (!(obj instanceof org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto)) {
3039        return super.equals(obj);
3040      }
3041      org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto other = (org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto) obj;
3042
3043      boolean result = true;
3044      result = result && (hasCallId() == other.hasCallId());
3045      if (hasCallId()) {
3046        result = result && (getCallId()
3047            == other.getCallId());
3048      }
3049      result = result && (hasStatus() == other.hasStatus());
3050      if (hasStatus()) {
3051        result = result &&
3052            (getStatus() == other.getStatus());
3053      }
3054      result = result && (hasServerIpcVersionNum() == other.hasServerIpcVersionNum());
3055      if (hasServerIpcVersionNum()) {
3056        result = result && (getServerIpcVersionNum()
3057            == other.getServerIpcVersionNum());
3058      }
3059      result = result && (hasExceptionClassName() == other.hasExceptionClassName());
3060      if (hasExceptionClassName()) {
3061        result = result && getExceptionClassName()
3062            .equals(other.getExceptionClassName());
3063      }
3064      result = result && (hasErrorMsg() == other.hasErrorMsg());
3065      if (hasErrorMsg()) {
3066        result = result && getErrorMsg()
3067            .equals(other.getErrorMsg());
3068      }
3069      result = result && (hasErrorDetail() == other.hasErrorDetail());
3070      if (hasErrorDetail()) {
3071        result = result &&
3072            (getErrorDetail() == other.getErrorDetail());
3073      }
3074      result = result && (hasClientId() == other.hasClientId());
3075      if (hasClientId()) {
3076        result = result && getClientId()
3077            .equals(other.getClientId());
3078      }
3079      result = result && (hasRetryCount() == other.hasRetryCount());
3080      if (hasRetryCount()) {
3081        result = result && (getRetryCount()
3082            == other.getRetryCount());
3083      }
3084      result = result &&
3085          getUnknownFields().equals(other.getUnknownFields());
3086      return result;
3087    }
3088
    // Cached hash; 0 means not yet computed (a real hash of 0 is recomputed each call).
    private int memoizedHashCode = 0;
    /**
     * Hash over the descriptor, every present field (tagged by its field
     * number), and the unknown field set; consistent with equals().
     */
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasCallId()) {
        hash = (37 * hash) + CALLID_FIELD_NUMBER;
        hash = (53 * hash) + getCallId();
      }
      if (hasStatus()) {
        // hashEnum comes from the protobuf runtime superclass.
        hash = (37 * hash) + STATUS_FIELD_NUMBER;
        hash = (53 * hash) + hashEnum(getStatus());
      }
      if (hasServerIpcVersionNum()) {
        hash = (37 * hash) + SERVERIPCVERSIONNUM_FIELD_NUMBER;
        hash = (53 * hash) + getServerIpcVersionNum();
      }
      if (hasExceptionClassName()) {
        hash = (37 * hash) + EXCEPTIONCLASSNAME_FIELD_NUMBER;
        hash = (53 * hash) + getExceptionClassName().hashCode();
      }
      if (hasErrorMsg()) {
        hash = (37 * hash) + ERRORMSG_FIELD_NUMBER;
        hash = (53 * hash) + getErrorMsg().hashCode();
      }
      if (hasErrorDetail()) {
        hash = (37 * hash) + ERRORDETAIL_FIELD_NUMBER;
        hash = (53 * hash) + hashEnum(getErrorDetail());
      }
      if (hasClientId()) {
        hash = (37 * hash) + CLIENTID_FIELD_NUMBER;
        hash = (53 * hash) + getClientId().hashCode();
      }
      if (hasRetryCount()) {
        hash = (37 * hash) + RETRYCOUNT_FIELD_NUMBER;
        hash = (53 * hash) + getRetryCount();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
3133
    // Parsing entry points.  All overloads delegate to the shared PARSER
    // instance; the *Delimited variants read a leading varint length prefix.
    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
3186
    /** Returns a new, empty builder for this message type. */
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    /** Returns a builder pre-populated with {@code prototype}'s fields. */
    public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    /** Framework hook: creates a builder attached to the given parent for change notification. */
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
3200    /**
3201     * Protobuf type {@code hadoop.common.RpcResponseHeaderProto}
3202     *
3203     * <pre>
3204     **
3205     * Rpc Response Header
3206     * +------------------------------------------------------------------+
3207     * | Rpc total response length in bytes (4 bytes int)                 |
3208     * |  (sum of next two parts)                                         |
3209     * +------------------------------------------------------------------+
3210     * | RpcResponseHeaderProto - serialized delimited ie has len         |
3211     * +------------------------------------------------------------------+
3212     * | if request is successful:                                        |
3213     * |   - RpcResponse -  The actual rpc response  bytes follow         |
3214     * |     the response header                                          |
3215     * |     This response is serialized based on RpcKindProto            |
3216     * | if request fails :                                               |
3217     * |   The rpc response header contains the necessary info            |
3218     * +------------------------------------------------------------------+
3219     *
     * Note that the rpc response header is also used when connection setup fails,
     * i.e. the response looks like an rpc response with a fake callId.
3222     * </pre>
3223     */
3224    public static final class Builder extends
3225        com.google.protobuf.GeneratedMessage.Builder<Builder>
3226       implements org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProtoOrBuilder {
      /** Descriptor of the message type this builder constructs. */
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RpcResponseHeaderProto_descriptor;
      }

      /** Reflection table mapping descriptor fields to the generated accessors. */
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RpcResponseHeaderProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.class, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.Builder.class);
      }

      // Construct using org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // No sub-message fields here, so there are no nested builders to pre-create.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }
3256
      /**
       * Resets every field to its default and clears all presence bits;
       * mirrors the defaults set by the message's initFields().
       */
      public Builder clear() {
        super.clear();
        callId_ = 0;
        bitField0_ = (bitField0_ & ~0x00000001);
        status_ = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcStatusProto.SUCCESS;
        bitField0_ = (bitField0_ & ~0x00000002);
        serverIpcVersionNum_ = 0;
        bitField0_ = (bitField0_ & ~0x00000004);
        exceptionClassName_ = "";
        bitField0_ = (bitField0_ & ~0x00000008);
        errorMsg_ = "";
        bitField0_ = (bitField0_ & ~0x00000010);
        errorDetail_ = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcErrorCodeProto.ERROR_APPLICATION;
        bitField0_ = (bitField0_ & ~0x00000020);
        clientId_ = com.google.protobuf.ByteString.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000040);
        // retryCount has an explicit proto default of -1.
        retryCount_ = -1;
        bitField0_ = (bitField0_ & ~0x00000080);
        return this;
      }
3277
      /** Deep copy via round-trip through a partially built message. */
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RpcResponseHeaderProto_descriptor;
      }

      public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto getDefaultInstanceForType() {
        return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.getDefaultInstance();
      }

      /**
       * Builds the message, enforcing required fields.
       *
       * @throws com.google.protobuf.UninitializedMessageException (unchecked)
       *     if callId or status is missing
       */
      public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto build() {
        org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
3298
      /**
       * Builds the message without checking required fields.  Field values are
       * copied unconditionally; only the presence bits are filtered through
       * the builder's bitField0_ before being installed on the result.
       */
      public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto buildPartial() {
        org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto result = new org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.callId_ = callId_;
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.status_ = status_;
        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
          to_bitField0_ |= 0x00000004;
        }
        result.serverIpcVersionNum_ = serverIpcVersionNum_;
        if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
          to_bitField0_ |= 0x00000008;
        }
        result.exceptionClassName_ = exceptionClassName_;
        if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
          to_bitField0_ |= 0x00000010;
        }
        result.errorMsg_ = errorMsg_;
        if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
          to_bitField0_ |= 0x00000020;
        }
        result.errorDetail_ = errorDetail_;
        if (((from_bitField0_ & 0x00000040) == 0x00000040)) {
          to_bitField0_ |= 0x00000040;
        }
        result.clientId_ = clientId_;
        if (((from_bitField0_ & 0x00000080) == 0x00000080)) {
          to_bitField0_ |= 0x00000080;
        }
        result.retryCount_ = retryCount_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
3339
      /** Dispatches to the typed overload when possible, else to reflective merge. */
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto) {
          return mergeFrom((org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      /**
       * Copies each field that is present on {@code other} into this builder,
       * overwriting any value already set here.
       */
      public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto other) {
        if (other == org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.getDefaultInstance()) return this;
        if (other.hasCallId()) {
          setCallId(other.getCallId());
        }
        if (other.hasStatus()) {
          setStatus(other.getStatus());
        }
        if (other.hasServerIpcVersionNum()) {
          setServerIpcVersionNum(other.getServerIpcVersionNum());
        }
        if (other.hasExceptionClassName()) {
          // String fields copy the raw String/ByteString reference directly so
          // merging never forces a UTF-8 decode.
          bitField0_ |= 0x00000008;
          exceptionClassName_ = other.exceptionClassName_;
          onChanged();
        }
        if (other.hasErrorMsg()) {
          bitField0_ |= 0x00000010;
          errorMsg_ = other.errorMsg_;
          onChanged();
        }
        if (other.hasErrorDetail()) {
          setErrorDetail(other.getErrorDetail());
        }
        if (other.hasClientId()) {
          setClientId(other.getClientId());
        }
        if (other.hasRetryCount()) {
          setRetryCount(other.getRetryCount());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
3382
      // A message is initialized only when both 'required' fields (callId,
      // status) are present; the optional fields never affect the result.
      public final boolean isInitialized() {
        if (!hasCallId()) {
          // required field 'callId' is missing
          return false;
        }
        if (!hasStatus()) {
          // required field 'status' is missing
          return false;
        }
        return true;
      }
3394
      // Stream-based merge: parses a serialized RpcResponseHeaderProto from
      // 'input' and merges it into this builder.  If parsing fails part-way
      // through, the fields read so far are still merged (via the finally
      // block) before the exception propagates.
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          // Recover the partially-parsed message so its fields are not lost.
          parsedMessage = (org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      // Presence bitmask for this builder: bit (1 << (N-1)) is set when
      // proto field number N has been explicitly assigned a value.
      private int bitField0_;

      // required uint32 callId = 1;
      private int callId_ ;
      /**
       * <code>required uint32 callId = 1;</code>
       *
       * <pre>
       * callId used in Request
       * </pre>
       */
      public boolean hasCallId() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>required uint32 callId = 1;</code>
       *
       * <pre>
       * callId used in Request
       * </pre>
       */
      public int getCallId() {
        return callId_;
      }
      /**
       * <code>required uint32 callId = 1;</code>
       *
       * <pre>
       * callId used in Request
       * </pre>
       */
      public Builder setCallId(int value) {
        bitField0_ |= 0x00000001;
        callId_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>required uint32 callId = 1;</code>
       *
       * <pre>
       * callId used in Request
       * </pre>
       */
      public Builder clearCallId() {
        // Clear the presence bit and reset to the proto default (0).
        bitField0_ = (bitField0_ & ~0x00000001);
        callId_ = 0;
        onChanged();
        return this;
      }
3462
      // required .hadoop.common.RpcResponseHeaderProto.RpcStatusProto status = 2;
      // Enum field; proto default is SUCCESS.
      private org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcStatusProto status_ = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcStatusProto.SUCCESS;
      /**
       * <code>required .hadoop.common.RpcResponseHeaderProto.RpcStatusProto status = 2;</code>
       */
      public boolean hasStatus() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>required .hadoop.common.RpcResponseHeaderProto.RpcStatusProto status = 2;</code>
       */
      public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcStatusProto getStatus() {
        return status_;
      }
      /**
       * <code>required .hadoop.common.RpcResponseHeaderProto.RpcStatusProto status = 2;</code>
       */
      public Builder setStatus(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcStatusProto value) {
        // Message/enum setters are null-hostile by protobuf convention.
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000002;
        status_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>required .hadoop.common.RpcResponseHeaderProto.RpcStatusProto status = 2;</code>
       */
      public Builder clearStatus() {
        bitField0_ = (bitField0_ & ~0x00000002);
        // Reset to the proto default (SUCCESS).
        status_ = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcStatusProto.SUCCESS;
        onChanged();
        return this;
      }
3498
      // optional uint32 serverIpcVersionNum = 3;
      // Proto default is 0 (the Java int default).
      private int serverIpcVersionNum_ ;
      /**
       * <code>optional uint32 serverIpcVersionNum = 3;</code>
       *
       * <pre>
       * Sent if success or fail
       * </pre>
       */
      public boolean hasServerIpcVersionNum() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      /**
       * <code>optional uint32 serverIpcVersionNum = 3;</code>
       *
       * <pre>
       * Sent if success or fail
       * </pre>
       */
      public int getServerIpcVersionNum() {
        return serverIpcVersionNum_;
      }
      /**
       * <code>optional uint32 serverIpcVersionNum = 3;</code>
       *
       * <pre>
       * Sent if success or fail
       * </pre>
       */
      public Builder setServerIpcVersionNum(int value) {
        bitField0_ |= 0x00000004;
        serverIpcVersionNum_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional uint32 serverIpcVersionNum = 3;</code>
       *
       * <pre>
       * Sent if success or fail
       * </pre>
       */
      public Builder clearServerIpcVersionNum() {
        bitField0_ = (bitField0_ & ~0x00000004);
        serverIpcVersionNum_ = 0;
        onChanged();
        return this;
      }
3547
      // optional string exceptionClassName = 4;
      // Stored as java.lang.Object so the value can be held either as a
      // String or as a ByteString; converted lazily (and cached) by the
      // accessors below.
      private java.lang.Object exceptionClassName_ = "";
      /**
       * <code>optional string exceptionClassName = 4;</code>
       *
       * <pre>
       * if request fails
       * </pre>
       */
      public boolean hasExceptionClassName() {
        return ((bitField0_ & 0x00000008) == 0x00000008);
      }
      /**
       * <code>optional string exceptionClassName = 4;</code>
       *
       * <pre>
       * if request fails
       * </pre>
       */
      public java.lang.String getExceptionClassName() {
        java.lang.Object ref = exceptionClassName_;
        if (!(ref instanceof java.lang.String)) {
          // Decode the ByteString form once and cache the String.
          java.lang.String s = ((com.google.protobuf.ByteString) ref)
              .toStringUtf8();
          exceptionClassName_ = s;
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string exceptionClassName = 4;</code>
       *
       * <pre>
       * if request fails
       * </pre>
       */
      public com.google.protobuf.ByteString
          getExceptionClassNameBytes() {
        java.lang.Object ref = exceptionClassName_;
        if (ref instanceof String) {
          // Encode the String form once and cache the ByteString.
          com.google.protobuf.ByteString b = 
              com.google.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          exceptionClassName_ = b;
          return b;
        } else {
          return (com.google.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string exceptionClassName = 4;</code>
       *
       * <pre>
       * if request fails
       * </pre>
       */
      public Builder setExceptionClassName(
          java.lang.String value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000008;
        exceptionClassName_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional string exceptionClassName = 4;</code>
       *
       * <pre>
       * if request fails
       * </pre>
       */
      public Builder clearExceptionClassName() {
        bitField0_ = (bitField0_ & ~0x00000008);
        // Reset to the default instance's value (the empty string).
        exceptionClassName_ = getDefaultInstance().getExceptionClassName();
        onChanged();
        return this;
      }
      /**
       * <code>optional string exceptionClassName = 4;</code>
       *
       * <pre>
       * if request fails
       * </pre>
       */
      public Builder setExceptionClassNameBytes(
          com.google.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000008;
        exceptionClassName_ = value;
        onChanged();
        return this;
      }
3645
      // optional string errorMsg = 5;
      // Stored as java.lang.Object (String or ByteString) with lazy, cached
      // conversion — same scheme as exceptionClassName_.
      private java.lang.Object errorMsg_ = "";
      /**
       * <code>optional string errorMsg = 5;</code>
       *
       * <pre>
       * if request fails, often contains strack trace
       * </pre>
       */
      public boolean hasErrorMsg() {
        return ((bitField0_ & 0x00000010) == 0x00000010);
      }
      /**
       * <code>optional string errorMsg = 5;</code>
       *
       * <pre>
       * if request fails, often contains strack trace
       * </pre>
       */
      public java.lang.String getErrorMsg() {
        java.lang.Object ref = errorMsg_;
        if (!(ref instanceof java.lang.String)) {
          // Decode the ByteString form once and cache the String.
          java.lang.String s = ((com.google.protobuf.ByteString) ref)
              .toStringUtf8();
          errorMsg_ = s;
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string errorMsg = 5;</code>
       *
       * <pre>
       * if request fails, often contains strack trace
       * </pre>
       */
      public com.google.protobuf.ByteString
          getErrorMsgBytes() {
        java.lang.Object ref = errorMsg_;
        if (ref instanceof String) {
          // Encode the String form once and cache the ByteString.
          com.google.protobuf.ByteString b = 
              com.google.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          errorMsg_ = b;
          return b;
        } else {
          return (com.google.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string errorMsg = 5;</code>
       *
       * <pre>
       * if request fails, often contains strack trace
       * </pre>
       */
      public Builder setErrorMsg(
          java.lang.String value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000010;
        errorMsg_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional string errorMsg = 5;</code>
       *
       * <pre>
       * if request fails, often contains strack trace
       * </pre>
       */
      public Builder clearErrorMsg() {
        bitField0_ = (bitField0_ & ~0x00000010);
        // Reset to the default instance's value (the empty string).
        errorMsg_ = getDefaultInstance().getErrorMsg();
        onChanged();
        return this;
      }
      /**
       * <code>optional string errorMsg = 5;</code>
       *
       * <pre>
       * if request fails, often contains strack trace
       * </pre>
       */
      public Builder setErrorMsgBytes(
          com.google.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000010;
        errorMsg_ = value;
        onChanged();
        return this;
      }
3743
      // optional .hadoop.common.RpcResponseHeaderProto.RpcErrorCodeProto errorDetail = 6;
      // Enum field; proto default is ERROR_APPLICATION.
      private org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcErrorCodeProto errorDetail_ = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcErrorCodeProto.ERROR_APPLICATION;
      /**
       * <code>optional .hadoop.common.RpcResponseHeaderProto.RpcErrorCodeProto errorDetail = 6;</code>
       *
       * <pre>
       * in case of error
       * </pre>
       */
      public boolean hasErrorDetail() {
        return ((bitField0_ & 0x00000020) == 0x00000020);
      }
      /**
       * <code>optional .hadoop.common.RpcResponseHeaderProto.RpcErrorCodeProto errorDetail = 6;</code>
       *
       * <pre>
       * in case of error
       * </pre>
       */
      public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcErrorCodeProto getErrorDetail() {
        return errorDetail_;
      }
      /**
       * <code>optional .hadoop.common.RpcResponseHeaderProto.RpcErrorCodeProto errorDetail = 6;</code>
       *
       * <pre>
       * in case of error
       * </pre>
       */
      public Builder setErrorDetail(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcErrorCodeProto value) {
        // Message/enum setters are null-hostile by protobuf convention.
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000020;
        errorDetail_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.common.RpcResponseHeaderProto.RpcErrorCodeProto errorDetail = 6;</code>
       *
       * <pre>
       * in case of error
       * </pre>
       */
      public Builder clearErrorDetail() {
        bitField0_ = (bitField0_ & ~0x00000020);
        // Reset to the proto default (ERROR_APPLICATION).
        errorDetail_ = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcErrorCodeProto.ERROR_APPLICATION;
        onChanged();
        return this;
      }
3795
      // optional bytes clientId = 7;
      // Proto default is the empty byte string.
      private com.google.protobuf.ByteString clientId_ = com.google.protobuf.ByteString.EMPTY;
      /**
       * <code>optional bytes clientId = 7;</code>
       *
       * <pre>
       * Globally unique client ID
       * </pre>
       */
      public boolean hasClientId() {
        return ((bitField0_ & 0x00000040) == 0x00000040);
      }
      /**
       * <code>optional bytes clientId = 7;</code>
       *
       * <pre>
       * Globally unique client ID
       * </pre>
       */
      public com.google.protobuf.ByteString getClientId() {
        return clientId_;
      }
      /**
       * <code>optional bytes clientId = 7;</code>
       *
       * <pre>
       * Globally unique client ID
       * </pre>
       */
      public Builder setClientId(com.google.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000040;
        clientId_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional bytes clientId = 7;</code>
       *
       * <pre>
       * Globally unique client ID
       * </pre>
       */
      public Builder clearClientId() {
        bitField0_ = (bitField0_ & ~0x00000040);
        // Reset to the default instance's value (ByteString.EMPTY).
        clientId_ = getDefaultInstance().getClientId();
        onChanged();
        return this;
      }
3847
      // optional sint32 retryCount = 8 [default = -1];
      // Non-zero default (-1) declared in the .proto; clearRetryCount()
      // restores it rather than 0.
      private int retryCount_ = -1;
      /**
       * <code>optional sint32 retryCount = 8 [default = -1];</code>
       */
      public boolean hasRetryCount() {
        return ((bitField0_ & 0x00000080) == 0x00000080);
      }
      /**
       * <code>optional sint32 retryCount = 8 [default = -1];</code>
       */
      public int getRetryCount() {
        return retryCount_;
      }
      /**
       * <code>optional sint32 retryCount = 8 [default = -1];</code>
       */
      public Builder setRetryCount(int value) {
        bitField0_ |= 0x00000080;
        retryCount_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional sint32 retryCount = 8 [default = -1];</code>
       */
      public Builder clearRetryCount() {
        bitField0_ = (bitField0_ & ~0x00000080);
        retryCount_ = -1;
        onChanged();
        return this;
      }
3880
3881      // @@protoc_insertion_point(builder_scope:hadoop.common.RpcResponseHeaderProto)
3882    }
3883
    static {
      // Eagerly build the singleton default instance and initialize its
      // fields to their proto-declared defaults.
      defaultInstance = new RpcResponseHeaderProto(true);
      defaultInstance.initFields();
    }
3888
3889    // @@protoc_insertion_point(class_scope:hadoop.common.RpcResponseHeaderProto)
3890  }
3891
  /**
   * Read-only accessor interface for {@code hadoop.common.RpcSaslProto},
   * implemented by both the message class and its builder.
   */
  public interface RpcSaslProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // optional uint32 version = 1;
    /**
     * <code>optional uint32 version = 1;</code>
     */
    boolean hasVersion();
    /**
     * <code>optional uint32 version = 1;</code>
     */
    int getVersion();

    // required .hadoop.common.RpcSaslProto.SaslState state = 2;
    /**
     * <code>required .hadoop.common.RpcSaslProto.SaslState state = 2;</code>
     */
    boolean hasState();
    /**
     * <code>required .hadoop.common.RpcSaslProto.SaslState state = 2;</code>
     */
    org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslState getState();

    // optional bytes token = 3;
    /**
     * <code>optional bytes token = 3;</code>
     */
    boolean hasToken();
    /**
     * <code>optional bytes token = 3;</code>
     */
    com.google.protobuf.ByteString getToken();

    // repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;
    /**
     * <code>repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;</code>
     */
    java.util.List<org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth> 
        getAuthsList();
    /**
     * <code>repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;</code>
     */
    org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth getAuths(int index);
    /**
     * <code>repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;</code>
     */
    int getAuthsCount();
    /**
     * <code>repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;</code>
     */
    java.util.List<? extends org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuthOrBuilder> 
        getAuthsOrBuilderList();
    /**
     * <code>repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;</code>
     */
    org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuthOrBuilder getAuthsOrBuilder(
        int index);
  }
3950  /**
3951   * Protobuf type {@code hadoop.common.RpcSaslProto}
3952   */
3953  public static final class RpcSaslProto extends
3954      com.google.protobuf.GeneratedMessage
3955      implements RpcSaslProtoOrBuilder {
    // Use RpcSaslProto.newBuilder() to construct.
    private RpcSaslProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Constructor for the singleton default instance; carries an empty
    // unknown-field set.
    private RpcSaslProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
3962
    // Singleton message with every field unset; assigned in the class's
    // static initializer.
    private static final RpcSaslProto defaultInstance;
    public static RpcSaslProto getDefaultInstance() {
      return defaultInstance;
    }

    public RpcSaslProto getDefaultInstanceForType() {
      return defaultInstance;
    }
3971
    // Fields seen on the wire that are not defined in this message's schema;
    // preserved so they can be re-serialized unchanged.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor: reads tag/value pairs from 'input'
    // until end of stream (tag 0) or an unskippable unknown tag.
    private RpcSaslProto(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              // Tag 0 marks end of stream.
              done = true;
              break;
            default: {
              // Unknown field: preserve it in unknownFields; stop when it
              // cannot be skipped.
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 8: {
              // Field 1 (version), varint.
              bitField0_ |= 0x00000001;
              version_ = input.readUInt32();
              break;
            }
            case 16: {
              // Field 2 (state), varint enum.
              int rawValue = input.readEnum();
              org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslState value = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslState.valueOf(rawValue);
              if (value == null) {
                // Unrecognized enum number: keep it as an unknown varint
                // instead of failing.
                unknownFields.mergeVarintField(2, rawValue);
              } else {
                bitField0_ |= 0x00000002;
                state_ = value;
              }
              break;
            }
            case 26: {
              // Field 3 (token), length-delimited bytes.
              bitField0_ |= 0x00000004;
              token_ = input.readBytes();
              break;
            }
            case 34: {
              // Field 4 (auths, repeated message): the list is allocated
              // lazily on the first element.
              if (!((mutable_bitField0_ & 0x00000008) == 0x00000008)) {
                auths_ = new java.util.ArrayList<org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth>();
                mutable_bitField0_ |= 0x00000008;
              }
              auths_.add(input.readMessage(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth.PARSER, extensionRegistry));
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Seal the repeated field and the unknown-field set even when
        // parsing failed part-way through.
        if (((mutable_bitField0_ & 0x00000008) == 0x00000008)) {
          auths_ = java.util.Collections.unmodifiableList(auths_);
        }
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    // Reflection plumbing: descriptor and field-accessor table registered in
    // the file-level RpcHeaderProtos class.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RpcSaslProto_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RpcSaslProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.class, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.Builder.class);
    }

    // Stateless parser delegating to the wire-format constructor above.
    public static com.google.protobuf.Parser<RpcSaslProto> PARSER =
        new com.google.protobuf.AbstractParser<RpcSaslProto>() {
      public RpcSaslProto parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new RpcSaslProto(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<RpcSaslProto> getParserForType() {
      return PARSER;
    }
4071
4072    /**
4073     * Protobuf enum {@code hadoop.common.RpcSaslProto.SaslState}
4074     */
4075    public enum SaslState
4076        implements com.google.protobuf.ProtocolMessageEnum {
4077      /**
4078       * <code>SUCCESS = 0;</code>
4079       */
4080      SUCCESS(0, 0),
4081      /**
4082       * <code>NEGOTIATE = 1;</code>
4083       */
4084      NEGOTIATE(1, 1),
4085      /**
4086       * <code>INITIATE = 2;</code>
4087       */
4088      INITIATE(2, 2),
4089      /**
4090       * <code>CHALLENGE = 3;</code>
4091       */
4092      CHALLENGE(3, 3),
4093      /**
4094       * <code>RESPONSE = 4;</code>
4095       */
4096      RESPONSE(4, 4),
4097      /**
4098       * <code>WRAP = 5;</code>
4099       */
4100      WRAP(5, 5),
4101      ;
4102
4103      /**
4104       * <code>SUCCESS = 0;</code>
4105       */
4106      public static final int SUCCESS_VALUE = 0;
4107      /**
4108       * <code>NEGOTIATE = 1;</code>
4109       */
4110      public static final int NEGOTIATE_VALUE = 1;
4111      /**
4112       * <code>INITIATE = 2;</code>
4113       */
4114      public static final int INITIATE_VALUE = 2;
4115      /**
4116       * <code>CHALLENGE = 3;</code>
4117       */
4118      public static final int CHALLENGE_VALUE = 3;
4119      /**
4120       * <code>RESPONSE = 4;</code>
4121       */
4122      public static final int RESPONSE_VALUE = 4;
4123      /**
4124       * <code>WRAP = 5;</code>
4125       */
4126      public static final int WRAP_VALUE = 5;
4127
4128
4129      public final int getNumber() { return value; }
4130
4131      public static SaslState valueOf(int value) {
4132        switch (value) {
4133          case 0: return SUCCESS;
4134          case 1: return NEGOTIATE;
4135          case 2: return INITIATE;
4136          case 3: return CHALLENGE;
4137          case 4: return RESPONSE;
4138          case 5: return WRAP;
4139          default: return null;
4140        }
4141      }
4142
4143      public static com.google.protobuf.Internal.EnumLiteMap<SaslState>
4144          internalGetValueMap() {
4145        return internalValueMap;
4146      }
4147      private static com.google.protobuf.Internal.EnumLiteMap<SaslState>
4148          internalValueMap =
4149            new com.google.protobuf.Internal.EnumLiteMap<SaslState>() {
4150              public SaslState findValueByNumber(int number) {
4151                return SaslState.valueOf(number);
4152              }
4153            };
4154
4155      public final com.google.protobuf.Descriptors.EnumValueDescriptor
4156          getValueDescriptor() {
4157        return getDescriptor().getValues().get(index);
4158      }
4159      public final com.google.protobuf.Descriptors.EnumDescriptor
4160          getDescriptorForType() {
4161        return getDescriptor();
4162      }
4163      public static final com.google.protobuf.Descriptors.EnumDescriptor
4164          getDescriptor() {
4165        return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.getDescriptor().getEnumTypes().get(0);
4166      }
4167
4168      private static final SaslState[] VALUES = values();
4169
4170      public static SaslState valueOf(
4171          com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
4172        if (desc.getType() != getDescriptor()) {
4173          throw new java.lang.IllegalArgumentException(
4174            "EnumValueDescriptor is not for this type.");
4175        }
4176        return VALUES[desc.getIndex()];
4177      }
4178
4179      private final int index;
4180      private final int value;
4181
4182      private SaslState(int index, int value) {
4183        this.index = index;
4184        this.value = value;
4185      }
4186
4187      // @@protoc_insertion_point(enum_scope:hadoop.common.RpcSaslProto.SaslState)
4188    }
4189
    /**
     * Read-only accessor interface for {@code hadoop.common.RpcSaslProto.SaslAuth},
     * implemented by both the message class and its builder.
     */
    public interface SaslAuthOrBuilder
        extends com.google.protobuf.MessageOrBuilder {

      // required string method = 1;
      /**
       * <code>required string method = 1;</code>
       */
      boolean hasMethod();
      /**
       * <code>required string method = 1;</code>
       */
      java.lang.String getMethod();
      /**
       * <code>required string method = 1;</code>
       */
      com.google.protobuf.ByteString
          getMethodBytes();

      // required string mechanism = 2;
      /**
       * <code>required string mechanism = 2;</code>
       */
      boolean hasMechanism();
      /**
       * <code>required string mechanism = 2;</code>
       */
      java.lang.String getMechanism();
      /**
       * <code>required string mechanism = 2;</code>
       */
      com.google.protobuf.ByteString
          getMechanismBytes();

      // optional string protocol = 3;
      /**
       * <code>optional string protocol = 3;</code>
       */
      boolean hasProtocol();
      /**
       * <code>optional string protocol = 3;</code>
       */
      java.lang.String getProtocol();
      /**
       * <code>optional string protocol = 3;</code>
       */
      com.google.protobuf.ByteString
          getProtocolBytes();

      // optional string serverId = 4;
      /**
       * <code>optional string serverId = 4;</code>
       */
      boolean hasServerId();
      /**
       * <code>optional string serverId = 4;</code>
       */
      java.lang.String getServerId();
      /**
       * <code>optional string serverId = 4;</code>
       */
      com.google.protobuf.ByteString
          getServerIdBytes();

      // optional bytes challenge = 5;
      /**
       * <code>optional bytes challenge = 5;</code>
       */
      boolean hasChallenge();
      /**
       * <code>optional bytes challenge = 5;</code>
       */
      com.google.protobuf.ByteString getChallenge();
    }
4263    /**
4264     * Protobuf type {@code hadoop.common.RpcSaslProto.SaslAuth}
4265     */
4266    public static final class SaslAuth extends
4267        com.google.protobuf.GeneratedMessage
4268        implements SaslAuthOrBuilder {
      // Use SaslAuth.newBuilder() to construct.
      private SaslAuth(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
        super(builder);
        this.unknownFields = builder.getUnknownFields();
      }
      // Constructor for the singleton default instance; carries an empty
      // unknown-field set.
      private SaslAuth(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
4275
      // Singleton message with every field unset; assigned in the class's
      // static initializer.
      private static final SaslAuth defaultInstance;
      public static SaslAuth getDefaultInstance() {
        return defaultInstance;
      }

      public SaslAuth getDefaultInstanceForType() {
        return defaultInstance;
      }
4284
      // Fields seen on the wire that are not defined in this message's
      // schema; preserved so they can be re-serialized unchanged.
      private final com.google.protobuf.UnknownFieldSet unknownFields;
      @java.lang.Override
      public final com.google.protobuf.UnknownFieldSet
          getUnknownFields() {
        return this.unknownFields;
      }
      // Wire-format parsing constructor: reads tag/value pairs from 'input'
      // until end of stream (tag 0) or an unskippable unknown tag.  All five
      // fields are length-delimited (wire type 2).
      private SaslAuth(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        initFields();
        int mutable_bitField0_ = 0;
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
            com.google.protobuf.UnknownFieldSet.newBuilder();
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                // Tag 0 marks end of stream.
                done = true;
                break;
              default: {
                // Unknown field: preserve it in unknownFields; stop when it
                // cannot be skipped.
                if (!parseUnknownField(input, unknownFields,
                                       extensionRegistry, tag)) {
                  done = true;
                }
                break;
              }
              case 10: {
                // Field 1 (method); string fields are read as raw bytes and
                // decoded lazily by the accessors.
                bitField0_ |= 0x00000001;
                method_ = input.readBytes();
                break;
              }
              case 18: {
                // Field 2 (mechanism).
                bitField0_ |= 0x00000002;
                mechanism_ = input.readBytes();
                break;
              }
              case 26: {
                // Field 3 (protocol).
                bitField0_ |= 0x00000004;
                protocol_ = input.readBytes();
                break;
              }
              case 34: {
                // Field 4 (serverId).
                bitField0_ |= 0x00000008;
                serverId_ = input.readBytes();
                break;
              }
              case 42: {
                // Field 5 (challenge).
                bitField0_ |= 0x00000010;
                challenge_ = input.readBytes();
                break;
              }
            }
          }
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(this);
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(
              e.getMessage()).setUnfinishedMessage(this);
        } finally {
          // Seal the unknown-field set even when parsing failed part-way.
          this.unknownFields = unknownFields.build();
          makeExtensionsImmutable();
        }
      }
      // Descriptor for hadoop.common.RpcSaslProto.SaslAuth, from the
      // file-level descriptor tables of RpcHeaderProtos.
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RpcSaslProto_SaslAuth_descriptor;
      }

      // Reflection table binding descriptor fields to the generated
      // accessors of SaslAuth and SaslAuth.Builder.
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RpcSaslProto_SaslAuth_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth.class, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth.Builder.class);
      }
4362
4363      public static com.google.protobuf.Parser<SaslAuth> PARSER =
4364          new com.google.protobuf.AbstractParser<SaslAuth>() {
4365        public SaslAuth parsePartialFrom(
4366            com.google.protobuf.CodedInputStream input,
4367            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
4368            throws com.google.protobuf.InvalidProtocolBufferException {
4369          return new SaslAuth(input, extensionRegistry);
4370        }
4371      };
4372
4373      @java.lang.Override
4374      public com.google.protobuf.Parser<SaslAuth> getParserForType() {
4375        return PARSER;
4376      }
4377
      // Presence bitmask: 0x01=method, 0x02=mechanism, 0x04=protocol,
      // 0x08=serverId, 0x10=challenge.
      private int bitField0_;
      // required string method = 1;
      public static final int METHOD_FIELD_NUMBER = 1;
      // Holds either a java.lang.String or a ByteString; converted lazily by
      // the accessors below (same pattern for all string fields here).
      private java.lang.Object method_;
      /**
       * <code>required string method = 1;</code>
       */
      public boolean hasMethod() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>required string method = 1;</code>
       *
       * Decodes the stored bytes as UTF-8 on demand; the decoded String is
       * cached back into the field only when the bytes are valid UTF-8.
       */
      public java.lang.String getMethod() {
        java.lang.Object ref = method_;
        if (ref instanceof java.lang.String) {
          return (java.lang.String) ref;
        } else {
          com.google.protobuf.ByteString bs = 
              (com.google.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            method_ = s;
          }
          return s;
        }
      }
      /**
       * <code>required string method = 1;</code>
       *
       * Inverse of getMethod(): encodes a cached String to UTF-8 bytes and
       * memoizes the ByteString form.
       */
      public com.google.protobuf.ByteString
          getMethodBytes() {
        java.lang.Object ref = method_;
        if (ref instanceof java.lang.String) {
          com.google.protobuf.ByteString b = 
              com.google.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          method_ = b;
          return b;
        } else {
          return (com.google.protobuf.ByteString) ref;
        }
      }

      // required string mechanism = 2;
      public static final int MECHANISM_FIELD_NUMBER = 2;
      private java.lang.Object mechanism_;
      /**
       * <code>required string mechanism = 2;</code>
       */
      public boolean hasMechanism() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>required string mechanism = 2;</code>
       *
       * Same lazy UTF-8 decode/cache pattern as getMethod().
       */
      public java.lang.String getMechanism() {
        java.lang.Object ref = mechanism_;
        if (ref instanceof java.lang.String) {
          return (java.lang.String) ref;
        } else {
          com.google.protobuf.ByteString bs = 
              (com.google.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            mechanism_ = s;
          }
          return s;
        }
      }
      /**
       * <code>required string mechanism = 2;</code>
       */
      public com.google.protobuf.ByteString
          getMechanismBytes() {
        java.lang.Object ref = mechanism_;
        if (ref instanceof java.lang.String) {
          com.google.protobuf.ByteString b = 
              com.google.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          mechanism_ = b;
          return b;
        } else {
          return (com.google.protobuf.ByteString) ref;
        }
      }

      // optional string protocol = 3;
      public static final int PROTOCOL_FIELD_NUMBER = 3;
      private java.lang.Object protocol_;
      /**
       * <code>optional string protocol = 3;</code>
       */
      public boolean hasProtocol() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      /**
       * <code>optional string protocol = 3;</code>
       *
       * Same lazy UTF-8 decode/cache pattern as getMethod().
       */
      public java.lang.String getProtocol() {
        java.lang.Object ref = protocol_;
        if (ref instanceof java.lang.String) {
          return (java.lang.String) ref;
        } else {
          com.google.protobuf.ByteString bs = 
              (com.google.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            protocol_ = s;
          }
          return s;
        }
      }
      /**
       * <code>optional string protocol = 3;</code>
       */
      public com.google.protobuf.ByteString
          getProtocolBytes() {
        java.lang.Object ref = protocol_;
        if (ref instanceof java.lang.String) {
          com.google.protobuf.ByteString b = 
              com.google.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          protocol_ = b;
          return b;
        } else {
          return (com.google.protobuf.ByteString) ref;
        }
      }

      // optional string serverId = 4;
      public static final int SERVERID_FIELD_NUMBER = 4;
      private java.lang.Object serverId_;
      /**
       * <code>optional string serverId = 4;</code>
       */
      public boolean hasServerId() {
        return ((bitField0_ & 0x00000008) == 0x00000008);
      }
      /**
       * <code>optional string serverId = 4;</code>
       *
       * Same lazy UTF-8 decode/cache pattern as getMethod().
       */
      public java.lang.String getServerId() {
        java.lang.Object ref = serverId_;
        if (ref instanceof java.lang.String) {
          return (java.lang.String) ref;
        } else {
          com.google.protobuf.ByteString bs = 
              (com.google.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            serverId_ = s;
          }
          return s;
        }
      }
      /**
       * <code>optional string serverId = 4;</code>
       */
      public com.google.protobuf.ByteString
          getServerIdBytes() {
        java.lang.Object ref = serverId_;
        if (ref instanceof java.lang.String) {
          com.google.protobuf.ByteString b = 
              com.google.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          serverId_ = b;
          return b;
        } else {
          return (com.google.protobuf.ByteString) ref;
        }
      }
4550
4551      // optional bytes challenge = 5;
4552      public static final int CHALLENGE_FIELD_NUMBER = 5;
4553      private com.google.protobuf.ByteString challenge_;
4554      /**
4555       * <code>optional bytes challenge = 5;</code>
4556       */
4557      public boolean hasChallenge() {
4558        return ((bitField0_ & 0x00000010) == 0x00000010);
4559      }
4560      /**
4561       * <code>optional bytes challenge = 5;</code>
4562       */
4563      public com.google.protobuf.ByteString getChallenge() {
4564        return challenge_;
4565      }
4566
      // Resets every field to its proto default; called before parsing so
      // unset fields are non-null.
      private void initFields() {
        method_ = "";
        mechanism_ = "";
        protocol_ = "";
        serverId_ = "";
        challenge_ = com.google.protobuf.ByteString.EMPTY;
      }
4574      private byte memoizedIsInitialized = -1;
4575      public final boolean isInitialized() {
4576        byte isInitialized = memoizedIsInitialized;
4577        if (isInitialized != -1) return isInitialized == 1;
4578
4579        if (!hasMethod()) {
4580          memoizedIsInitialized = 0;
4581          return false;
4582        }
4583        if (!hasMechanism()) {
4584          memoizedIsInitialized = 0;
4585          return false;
4586        }
4587        memoizedIsInitialized = 1;
4588        return true;
4589      }
4590
      // Serializes each present field in field-number order (strings are
      // emitted as UTF-8 via the *Bytes() getters), then the unknown fields.
      public void writeTo(com.google.protobuf.CodedOutputStream output)
                          throws java.io.IOException {
        getSerializedSize();  // ensure the memoized size is populated first
        if (((bitField0_ & 0x00000001) == 0x00000001)) {
          output.writeBytes(1, getMethodBytes());
        }
        if (((bitField0_ & 0x00000002) == 0x00000002)) {
          output.writeBytes(2, getMechanismBytes());
        }
        if (((bitField0_ & 0x00000004) == 0x00000004)) {
          output.writeBytes(3, getProtocolBytes());
        }
        if (((bitField0_ & 0x00000008) == 0x00000008)) {
          output.writeBytes(4, getServerIdBytes());
        }
        if (((bitField0_ & 0x00000010) == 0x00000010)) {
          output.writeBytes(5, challenge_);
        }
        getUnknownFields().writeTo(output);
      }

      // Memoized wire size; -1 means "not yet computed".
      private int memoizedSerializedSize = -1;
      // Sums tag+length+payload sizes for every present field, mirroring the
      // exact set of fields writeTo() emits; result is cached.
      public int getSerializedSize() {
        int size = memoizedSerializedSize;
        if (size != -1) return size;

        size = 0;
        if (((bitField0_ & 0x00000001) == 0x00000001)) {
          size += com.google.protobuf.CodedOutputStream
            .computeBytesSize(1, getMethodBytes());
        }
        if (((bitField0_ & 0x00000002) == 0x00000002)) {
          size += com.google.protobuf.CodedOutputStream
            .computeBytesSize(2, getMechanismBytes());
        }
        if (((bitField0_ & 0x00000004) == 0x00000004)) {
          size += com.google.protobuf.CodedOutputStream
            .computeBytesSize(3, getProtocolBytes());
        }
        if (((bitField0_ & 0x00000008) == 0x00000008)) {
          size += com.google.protobuf.CodedOutputStream
            .computeBytesSize(4, getServerIdBytes());
        }
        if (((bitField0_ & 0x00000010) == 0x00000010)) {
          size += com.google.protobuf.CodedOutputStream
            .computeBytesSize(5, challenge_);
        }
        size += getUnknownFields().getSerializedSize();
        memoizedSerializedSize = size;
        return size;
      }
4642
      private static final long serialVersionUID = 0L;
      // Java serialization hook; delegates to GeneratedMessage.writeReplace()
      // (presumably substitutes a serialization proxy — confirm against the
      // protobuf runtime version in use).
      @java.lang.Override
      protected java.lang.Object writeReplace()
          throws java.io.ObjectStreamException {
        return super.writeReplace();
      }
4649
4650      @java.lang.Override
4651      public boolean equals(final java.lang.Object obj) {
4652        if (obj == this) {
4653         return true;
4654        }
4655        if (!(obj instanceof org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth)) {
4656          return super.equals(obj);
4657        }
4658        org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth other = (org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth) obj;
4659
4660        boolean result = true;
4661        result = result && (hasMethod() == other.hasMethod());
4662        if (hasMethod()) {
4663          result = result && getMethod()
4664              .equals(other.getMethod());
4665        }
4666        result = result && (hasMechanism() == other.hasMechanism());
4667        if (hasMechanism()) {
4668          result = result && getMechanism()
4669              .equals(other.getMechanism());
4670        }
4671        result = result && (hasProtocol() == other.hasProtocol());
4672        if (hasProtocol()) {
4673          result = result && getProtocol()
4674              .equals(other.getProtocol());
4675        }
4676        result = result && (hasServerId() == other.hasServerId());
4677        if (hasServerId()) {
4678          result = result && getServerId()
4679              .equals(other.getServerId());
4680        }
4681        result = result && (hasChallenge() == other.hasChallenge());
4682        if (hasChallenge()) {
4683          result = result && getChallenge()
4684              .equals(other.getChallenge());
4685        }
4686        result = result &&
4687            getUnknownFields().equals(other.getUnknownFields());
4688        return result;
4689      }
4690
      // Cached hash; 0 is the "not yet computed" sentinel.
      private int memoizedHashCode = 0;
      // Mixes the descriptor hash, then (field number, value) for each
      // present field, then the unknown fields; consistent with equals().
      @java.lang.Override
      public int hashCode() {
        if (memoizedHashCode != 0) {
          return memoizedHashCode;
        }
        int hash = 41;
        hash = (19 * hash) + getDescriptorForType().hashCode();
        if (hasMethod()) {
          hash = (37 * hash) + METHOD_FIELD_NUMBER;
          hash = (53 * hash) + getMethod().hashCode();
        }
        if (hasMechanism()) {
          hash = (37 * hash) + MECHANISM_FIELD_NUMBER;
          hash = (53 * hash) + getMechanism().hashCode();
        }
        if (hasProtocol()) {
          hash = (37 * hash) + PROTOCOL_FIELD_NUMBER;
          hash = (53 * hash) + getProtocol().hashCode();
        }
        if (hasServerId()) {
          hash = (37 * hash) + SERVERID_FIELD_NUMBER;
          hash = (53 * hash) + getServerId().hashCode();
        }
        if (hasChallenge()) {
          hash = (37 * hash) + CHALLENGE_FIELD_NUMBER;
          hash = (53 * hash) + getChallenge().hashCode();
        }
        hash = (29 * hash) + getUnknownFields().hashCode();
        memoizedHashCode = hash;
        return hash;
      }
4723
      // Static parse entry points: all delegate to PARSER, differing only in
      // input source (ByteString / byte[] / InputStream / CodedInputStream)
      // and whether an ExtensionRegistry is supplied.
      public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth parseFrom(
          com.google.protobuf.ByteString data)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data);
      }
      public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth parseFrom(
          com.google.protobuf.ByteString data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data, extensionRegistry);
      }
      public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth parseFrom(byte[] data)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data);
      }
      public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth parseFrom(
          byte[] data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data, extensionRegistry);
      }
      public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth parseFrom(java.io.InputStream input)
          throws java.io.IOException {
        return PARSER.parseFrom(input);
      }
      public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth parseFrom(
          java.io.InputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseFrom(input, extensionRegistry);
      }
      // Delimited variants: the stream carries a varint length prefix before
      // the message bytes.
      public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth parseDelimitedFrom(java.io.InputStream input)
          throws java.io.IOException {
        return PARSER.parseDelimitedFrom(input);
      }
      public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth parseDelimitedFrom(
          java.io.InputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseDelimitedFrom(input, extensionRegistry);
      }
      public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth parseFrom(
          com.google.protobuf.CodedInputStream input)
          throws java.io.IOException {
        return PARSER.parseFrom(input);
      }
      public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth parseFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseFrom(input, extensionRegistry);
      }

      // Builder factories: fresh builder, builder pre-seeded from a
      // prototype message, and builder seeded from this instance.
      public static Builder newBuilder() { return Builder.create(); }
      public Builder newBuilderForType() { return newBuilder(); }
      public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth prototype) {
        return newBuilder().mergeFrom(prototype);
      }
      public Builder toBuilder() { return newBuilder(this); }

      // Framework hook: builder attached to a parent for change notification.
      @java.lang.Override
      protected Builder newBuilderForType(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        Builder builder = new Builder(parent);
        return builder;
      }
4790      /**
4791       * Protobuf type {@code hadoop.common.RpcSaslProto.SaslAuth}
4792       */
4793      public static final class Builder extends
4794          com.google.protobuf.GeneratedMessage.Builder<Builder>
4795         implements org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuthOrBuilder {
        // Same descriptor as the message class.
        public static final com.google.protobuf.Descriptors.Descriptor
            getDescriptor() {
          return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RpcSaslProto_SaslAuth_descriptor;
        }

        protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
            internalGetFieldAccessorTable() {
          return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RpcSaslProto_SaslAuth_fieldAccessorTable
              .ensureFieldAccessorsInitialized(
                  org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth.class, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth.Builder.class);
        }

        // Construct using org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth.newBuilder()
        private Builder() {
          maybeForceBuilderInitialization();
        }

        private Builder(
            com.google.protobuf.GeneratedMessage.BuilderParent parent) {
          super(parent);
          maybeForceBuilderInitialization();
        }
        // No-op here: SaslAuth has no sub-message fields, so there are no
        // nested field builders to force-create.
        private void maybeForceBuilderInitialization() {
          if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          }
        }
        private static Builder create() {
          return new Builder();
        }

        // Resets all five fields to their proto defaults and clears every
        // presence bit.
        public Builder clear() {
          super.clear();
          method_ = "";
          bitField0_ = (bitField0_ & ~0x00000001);
          mechanism_ = "";
          bitField0_ = (bitField0_ & ~0x00000002);
          protocol_ = "";
          bitField0_ = (bitField0_ & ~0x00000004);
          serverId_ = "";
          bitField0_ = (bitField0_ & ~0x00000008);
          challenge_ = com.google.protobuf.ByteString.EMPTY;
          bitField0_ = (bitField0_ & ~0x00000010);
          return this;
        }

        // Deep copy via a fresh builder merged from the current partial state.
        public Builder clone() {
          return create().mergeFrom(buildPartial());
        }

        public com.google.protobuf.Descriptors.Descriptor
            getDescriptorForType() {
          return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RpcSaslProto_SaslAuth_descriptor;
        }

        public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth getDefaultInstanceForType() {
          return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth.getDefaultInstance();
        }
4853
        // Builds and verifies required fields; throws (via
        // newUninitializedMessageException) if method or mechanism is unset.
        public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth build() {
          org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth result = buildPartial();
          if (!result.isInitialized()) {
            throw newUninitializedMessageException(result);
          }
          return result;
        }

        // Builds without the required-field check. Field values are copied
        // unconditionally; the builder's presence bits are translated into
        // the message's bitField0_ bit by bit.
        public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth buildPartial() {
          org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth result = new org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth(this);
          int from_bitField0_ = bitField0_;
          int to_bitField0_ = 0;
          if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
            to_bitField0_ |= 0x00000001;
          }
          result.method_ = method_;
          if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
            to_bitField0_ |= 0x00000002;
          }
          result.mechanism_ = mechanism_;
          if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
            to_bitField0_ |= 0x00000004;
          }
          result.protocol_ = protocol_;
          if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
            to_bitField0_ |= 0x00000008;
          }
          result.serverId_ = serverId_;
          if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
            to_bitField0_ |= 0x00000010;
          }
          result.challenge_ = challenge_;
          result.bitField0_ = to_bitField0_;
          onBuilt();
          return result;
        }

        // Dynamic dispatch: merge a same-type message field-by-field, or fall
        // back to the reflective descriptor-based merge for other Messages.
        public Builder mergeFrom(com.google.protobuf.Message other) {
          if (other instanceof org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth) {
            return mergeFrom((org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth)other);
          } else {
            super.mergeFrom(other);
            return this;
          }
        }

        // Copies only the fields that are set on other; fields already set on
        // this builder but unset on other are left untouched.
        public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth other) {
          if (other == org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth.getDefaultInstance()) return this;
          if (other.hasMethod()) {
            bitField0_ |= 0x00000001;
            method_ = other.method_;
            onChanged();
          }
          if (other.hasMechanism()) {
            bitField0_ |= 0x00000002;
            mechanism_ = other.mechanism_;
            onChanged();
          }
          if (other.hasProtocol()) {
            bitField0_ |= 0x00000004;
            protocol_ = other.protocol_;
            onChanged();
          }
          if (other.hasServerId()) {
            bitField0_ |= 0x00000008;
            serverId_ = other.serverId_;
            onChanged();
          }
          if (other.hasChallenge()) {
            setChallenge(other.getChallenge());
          }
          this.mergeUnknownFields(other.getUnknownFields());
          return this;
        }
4928
4929        public final boolean isInitialized() {
4930          if (!hasMethod()) {
4931            
4932            return false;
4933          }
4934          if (!hasMechanism()) {
4935            
4936            return false;
4937          }
4938          return true;
4939        }
4940
        // Parses a message from the stream via PARSER and merges it into this
        // builder. On parse failure the exception's partially-parsed message
        // (if any) is still merged in the finally block before rethrowing.
        public Builder mergeFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws java.io.IOException {
          org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth parsedMessage = null;
          try {
            parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            parsedMessage = (org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth) e.getUnfinishedMessage();
            throw e;
          } finally {
            if (parsedMessage != null) {
              mergeFrom(parsedMessage);
            }
          }
          return this;
        }
4958        private int bitField0_;
4959
4960        // required string method = 1;
4961        private java.lang.Object method_ = "";
4962        /**
4963         * <code>required string method = 1;</code>
4964         */
4965        public boolean hasMethod() {
4966          return ((bitField0_ & 0x00000001) == 0x00000001);
4967        }
4968        /**
4969         * <code>required string method = 1;</code>
4970         */
4971        public java.lang.String getMethod() {
4972          java.lang.Object ref = method_;
4973          if (!(ref instanceof java.lang.String)) {
4974            java.lang.String s = ((com.google.protobuf.ByteString) ref)
4975                .toStringUtf8();
4976            method_ = s;
4977            return s;
4978          } else {
4979            return (java.lang.String) ref;
4980          }
4981        }
4982        /**
4983         * <code>required string method = 1;</code>
4984         */
4985        public com.google.protobuf.ByteString
4986            getMethodBytes() {
4987          java.lang.Object ref = method_;
4988          if (ref instanceof String) {
4989            com.google.protobuf.ByteString b = 
4990                com.google.protobuf.ByteString.copyFromUtf8(
4991                    (java.lang.String) ref);
4992            method_ = b;
4993            return b;
4994          } else {
4995            return (com.google.protobuf.ByteString) ref;
4996          }
4997        }
4998        /**
4999         * <code>required string method = 1;</code>
5000         */
5001        public Builder setMethod(
5002            java.lang.String value) {
5003          if (value == null) {
5004    throw new NullPointerException();
5005  }
5006  bitField0_ |= 0x00000001;
5007          method_ = value;
5008          onChanged();
5009          return this;
5010        }
5011        /**
5012         * <code>required string method = 1;</code>
5013         */
5014        public Builder clearMethod() {
5015          bitField0_ = (bitField0_ & ~0x00000001);
5016          method_ = getDefaultInstance().getMethod();
5017          onChanged();
5018          return this;
5019        }
5020        /**
5021         * <code>required string method = 1;</code>
5022         */
5023        public Builder setMethodBytes(
5024            com.google.protobuf.ByteString value) {
5025          if (value == null) {
5026    throw new NullPointerException();
5027  }
5028  bitField0_ |= 0x00000001;
5029          method_ = value;
5030          onChanged();
5031          return this;
5032        }
5033
5034        // required string mechanism = 2;
5035        private java.lang.Object mechanism_ = "";
5036        /**
5037         * <code>required string mechanism = 2;</code>
5038         */
5039        public boolean hasMechanism() {
5040          return ((bitField0_ & 0x00000002) == 0x00000002);
5041        }
5042        /**
5043         * <code>required string mechanism = 2;</code>
5044         */
        // Returns the mechanism field. The backing field holds either a String
        // or a ByteString; on first String access the UTF-8-decoded value is
        // cached back into mechanism_ (standard protobuf 2.x lazy decoding).
        public java.lang.String getMechanism() {
          java.lang.Object ref = mechanism_;
          if (!(ref instanceof java.lang.String)) {
            java.lang.String s = ((com.google.protobuf.ByteString) ref)
                .toStringUtf8();
            mechanism_ = s;
            return s;
          } else {
            return (java.lang.String) ref;
          }
        }
        /**
         * <code>required string mechanism = 2;</code>
         */
        // Returns the mechanism field as raw bytes, caching the encoded
        // ByteString back into mechanism_ when it is currently a String.
        public com.google.protobuf.ByteString
            getMechanismBytes() {
          java.lang.Object ref = mechanism_;
          if (ref instanceof String) {
            com.google.protobuf.ByteString b = 
                com.google.protobuf.ByteString.copyFromUtf8(
                    (java.lang.String) ref);
            mechanism_ = b;
            return b;
          } else {
            return (com.google.protobuf.ByteString) ref;
          }
        }
        /**
         * <code>required string mechanism = 2;</code>
         */
        // Sets mechanism and marks presence bit 0x2; rejects null.
        public Builder setMechanism(
            java.lang.String value) {
          if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000002;
          mechanism_ = value;
          onChanged();
          return this;
        }
        /**
         * <code>required string mechanism = 2;</code>
         */
        // Clears presence bit 0x2 and restores the default-instance value.
        public Builder clearMechanism() {
          bitField0_ = (bitField0_ & ~0x00000002);
          mechanism_ = getDefaultInstance().getMechanism();
          onChanged();
          return this;
        }
        /**
         * <code>required string mechanism = 2;</code>
         */
        // Sets mechanism from pre-encoded bytes (no UTF-8 validation here).
        public Builder setMechanismBytes(
            com.google.protobuf.ByteString value) {
          if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000002;
          mechanism_ = value;
          onChanged();
          return this;
        }
5107
        // optional string protocol = 3;
        // Holds either a String or a ByteString (lazy UTF-8 decode/encode).
        private java.lang.Object protocol_ = "";
        /**
         * <code>optional string protocol = 3;</code>
         */
        // True when presence bit 0x4 is set.
        public boolean hasProtocol() {
          return ((bitField0_ & 0x00000004) == 0x00000004);
        }
        /**
         * <code>optional string protocol = 3;</code>
         */
        // Returns protocol, caching the decoded String back into protocol_.
        public java.lang.String getProtocol() {
          java.lang.Object ref = protocol_;
          if (!(ref instanceof java.lang.String)) {
            java.lang.String s = ((com.google.protobuf.ByteString) ref)
                .toStringUtf8();
            protocol_ = s;
            return s;
          } else {
            return (java.lang.String) ref;
          }
        }
        /**
         * <code>optional string protocol = 3;</code>
         */
        // Returns protocol as bytes, caching the encoded ByteString.
        public com.google.protobuf.ByteString
            getProtocolBytes() {
          java.lang.Object ref = protocol_;
          if (ref instanceof String) {
            com.google.protobuf.ByteString b = 
                com.google.protobuf.ByteString.copyFromUtf8(
                    (java.lang.String) ref);
            protocol_ = b;
            return b;
          } else {
            return (com.google.protobuf.ByteString) ref;
          }
        }
        /**
         * <code>optional string protocol = 3;</code>
         */
        // Sets protocol and marks presence bit 0x4; rejects null.
        public Builder setProtocol(
            java.lang.String value) {
          if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000004;
          protocol_ = value;
          onChanged();
          return this;
        }
        /**
         * <code>optional string protocol = 3;</code>
         */
        // Clears presence bit 0x4 and restores the default-instance value.
        public Builder clearProtocol() {
          bitField0_ = (bitField0_ & ~0x00000004);
          protocol_ = getDefaultInstance().getProtocol();
          onChanged();
          return this;
        }
        /**
         * <code>optional string protocol = 3;</code>
         */
        // Sets protocol from pre-encoded bytes (no UTF-8 validation here).
        public Builder setProtocolBytes(
            com.google.protobuf.ByteString value) {
          if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000004;
          protocol_ = value;
          onChanged();
          return this;
        }
5181
        // optional string serverId = 4;
        // Holds either a String or a ByteString (lazy UTF-8 decode/encode).
        private java.lang.Object serverId_ = "";
        /**
         * <code>optional string serverId = 4;</code>
         */
        // True when presence bit 0x8 is set.
        public boolean hasServerId() {
          return ((bitField0_ & 0x00000008) == 0x00000008);
        }
        /**
         * <code>optional string serverId = 4;</code>
         */
        // Returns serverId, caching the decoded String back into serverId_.
        public java.lang.String getServerId() {
          java.lang.Object ref = serverId_;
          if (!(ref instanceof java.lang.String)) {
            java.lang.String s = ((com.google.protobuf.ByteString) ref)
                .toStringUtf8();
            serverId_ = s;
            return s;
          } else {
            return (java.lang.String) ref;
          }
        }
        /**
         * <code>optional string serverId = 4;</code>
         */
        // Returns serverId as bytes, caching the encoded ByteString.
        public com.google.protobuf.ByteString
            getServerIdBytes() {
          java.lang.Object ref = serverId_;
          if (ref instanceof String) {
            com.google.protobuf.ByteString b = 
                com.google.protobuf.ByteString.copyFromUtf8(
                    (java.lang.String) ref);
            serverId_ = b;
            return b;
          } else {
            return (com.google.protobuf.ByteString) ref;
          }
        }
        /**
         * <code>optional string serverId = 4;</code>
         */
        // Sets serverId and marks presence bit 0x8; rejects null.
        public Builder setServerId(
            java.lang.String value) {
          if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000008;
          serverId_ = value;
          onChanged();
          return this;
        }
        /**
         * <code>optional string serverId = 4;</code>
         */
        // Clears presence bit 0x8 and restores the default-instance value.
        public Builder clearServerId() {
          bitField0_ = (bitField0_ & ~0x00000008);
          serverId_ = getDefaultInstance().getServerId();
          onChanged();
          return this;
        }
        /**
         * <code>optional string serverId = 4;</code>
         */
        // Sets serverId from pre-encoded bytes (no UTF-8 validation here).
        public Builder setServerIdBytes(
            com.google.protobuf.ByteString value) {
          if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000008;
          serverId_ = value;
          onChanged();
          return this;
        }
5255
        // optional bytes challenge = 5;
        // Raw bytes field; defaults to the empty ByteString.
        private com.google.protobuf.ByteString challenge_ = com.google.protobuf.ByteString.EMPTY;
        /**
         * <code>optional bytes challenge = 5;</code>
         */
        // True when presence bit 0x10 is set.
        public boolean hasChallenge() {
          return ((bitField0_ & 0x00000010) == 0x00000010);
        }
        /**
         * <code>optional bytes challenge = 5;</code>
         */
        public com.google.protobuf.ByteString getChallenge() {
          return challenge_;
        }
        /**
         * <code>optional bytes challenge = 5;</code>
         */
        // Sets challenge and marks presence bit 0x10; rejects null.
        public Builder setChallenge(com.google.protobuf.ByteString value) {
          if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000010;
          challenge_ = value;
          onChanged();
          return this;
        }
        /**
         * <code>optional bytes challenge = 5;</code>
         */
        // Clears presence bit 0x10 and restores the default-instance value.
        public Builder clearChallenge() {
          bitField0_ = (bitField0_ & ~0x00000010);
          challenge_ = getDefaultInstance().getChallenge();
          onChanged();
          return this;
        }
5291
5292        // @@protoc_insertion_point(builder_scope:hadoop.common.RpcSaslProto.SaslAuth)
5293      }
5294
      // Eagerly builds the shared default instance for SaslAuth; the boolean
      // constructor argument selects the no-op "default instance" path.
      static {
        defaultInstance = new SaslAuth(true);
        defaultInstance.initFields();
      }
5299
5300      // @@protoc_insertion_point(class_scope:hadoop.common.RpcSaslProto.SaslAuth)
5301    }
5302
    // Presence bits for the optional/required fields of this message.
    private int bitField0_;
    // optional uint32 version = 1;
    public static final int VERSION_FIELD_NUMBER = 1;
    private int version_;
    /**
     * <code>optional uint32 version = 1;</code>
     */
    // True when presence bit 0x1 is set.
    public boolean hasVersion() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>optional uint32 version = 1;</code>
     */
    public int getVersion() {
      return version_;
    }

    // required .hadoop.common.RpcSaslProto.SaslState state = 2;
    public static final int STATE_FIELD_NUMBER = 2;
    private org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslState state_;
    /**
     * <code>required .hadoop.common.RpcSaslProto.SaslState state = 2;</code>
     */
    // True when presence bit 0x2 is set.
    public boolean hasState() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>required .hadoop.common.RpcSaslProto.SaslState state = 2;</code>
     */
    public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslState getState() {
      return state_;
    }

    // optional bytes token = 3;
    public static final int TOKEN_FIELD_NUMBER = 3;
    private com.google.protobuf.ByteString token_;
    /**
     * <code>optional bytes token = 3;</code>
     */
    // True when presence bit 0x4 is set.
    public boolean hasToken() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    /**
     * <code>optional bytes token = 3;</code>
     */
    public com.google.protobuf.ByteString getToken() {
      return token_;
    }

    // repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;
    // Repeated fields carry no presence bit on the message; emptiness is the
    // "absent" state.
    public static final int AUTHS_FIELD_NUMBER = 4;
    private java.util.List<org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth> auths_;
    /**
     * <code>repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;</code>
     */
    public java.util.List<org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth> getAuthsList() {
      return auths_;
    }
    /**
     * <code>repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;</code>
     */
    public java.util.List<? extends org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuthOrBuilder> 
        getAuthsOrBuilderList() {
      return auths_;
    }
    /**
     * <code>repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;</code>
     */
    public int getAuthsCount() {
      return auths_.size();
    }
    /**
     * <code>repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;</code>
     */
    public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth getAuths(int index) {
      return auths_.get(index);
    }
    /**
     * <code>repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;</code>
     */
    public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuthOrBuilder getAuthsOrBuilder(
        int index) {
      return auths_.get(index);
    }
5387
    // Resets every field to its proto-declared default value; called from the
    // constructors and the static default-instance initializer.
    private void initFields() {
      version_ = 0;
      state_ = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslState.SUCCESS;
      token_ = com.google.protobuf.ByteString.EMPTY;
      auths_ = java.util.Collections.emptyList();
    }
    // Memoized initialization state: -1 = unknown, 0 = false, 1 = true.
    private byte memoizedIsInitialized = -1;
    // A message is initialized when the required 'state' field is set and
    // every nested SaslAuth in 'auths' is itself initialized.
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      if (!hasState()) {
        memoizedIsInitialized = 0;
        return false;
      }
      for (int i = 0; i < getAuthsCount(); i++) {
        if (!getAuths(i).isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }
5412
    // Serializes the set fields in field-number order, then any unknown
    // fields. getSerializedSize() is called first to populate memoized sizes
    // used by nested-message length prefixes.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeUInt32(1, version_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeEnum(2, state_.getNumber());
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeBytes(3, token_);
      }
      for (int i = 0; i < auths_.size(); i++) {
        output.writeMessage(4, auths_.get(i));
      }
      getUnknownFields().writeTo(output);
    }
5430
    // Cached wire size; -1 means "not yet computed".
    private int memoizedSerializedSize = -1;
    // Computes (and memoizes) the serialized byte size of the set fields plus
    // unknown fields, mirroring the order used by writeTo().
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt32Size(1, version_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeEnumSize(2, state_.getNumber());
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(3, token_);
      }
      for (int i = 0; i < auths_.size(); i++) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(4, auths_.get(i));
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
5457
    private static final long serialVersionUID = 0L;
    // Delegates Java serialization to the superclass's serialized proxy.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
5464
    // Field-by-field equality: presence flags must match, and each present
    // field must compare equal; unknown fields are included.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto other = (org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto) obj;

      boolean result = true;
      result = result && (hasVersion() == other.hasVersion());
      if (hasVersion()) {
        result = result && (getVersion()
            == other.getVersion());
      }
      result = result && (hasState() == other.hasState());
      if (hasState()) {
        result = result &&
            (getState() == other.getState());
      }
      result = result && (hasToken() == other.hasToken());
      if (hasToken()) {
        result = result && getToken()
            .equals(other.getToken());
      }
      result = result && getAuthsList()
          .equals(other.getAuthsList());
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
5497
    // Cached hash; 0 means "not yet computed".
    private int memoizedHashCode = 0;
    // Mixes the descriptor, each present field (tagged with its field number),
    // and the unknown fields, matching the equals() contract above.
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasVersion()) {
        hash = (37 * hash) + VERSION_FIELD_NUMBER;
        hash = (53 * hash) + getVersion();
      }
      if (hasState()) {
        hash = (37 * hash) + STATE_FIELD_NUMBER;
        hash = (53 * hash) + hashEnum(getState());
      }
      if (hasToken()) {
        hash = (37 * hash) + TOKEN_FIELD_NUMBER;
        hash = (53 * hash) + getToken().hashCode();
      }
      if (getAuthsCount() > 0) {
        hash = (37 * hash) + AUTHS_FIELD_NUMBER;
        hash = (53 * hash) + getAuthsList().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
5526
    // Static parse entry points; all delegate to the message's PARSER.
    // The delimited variants read a leading varint length prefix.
    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
5579
    // Builder factories: fresh builder, builder pre-populated from a
    // prototype, and the parent-aware variant used for nested builders.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
5593    /**
5594     * Protobuf type {@code hadoop.common.RpcSaslProto}
5595     */
5596    public static final class Builder extends
5597        com.google.protobuf.GeneratedMessage.Builder<Builder>
5598       implements org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProtoOrBuilder {
      // Reflection support: descriptor and field-accessor table for
      // hadoop.common.RpcSaslProto, shared with the message class.
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RpcSaslProto_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RpcSaslProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.class, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.Builder.class);
      }
5610
      // Construct using org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // Eagerly creates the repeated-field builder for 'auths' when the
      // runtime is configured to always use field builders.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getAuthsFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }
5629
      // Resets every field to its default and clears all presence bits; the
      // auths list is reset either directly or via its field builder.
      public Builder clear() {
        super.clear();
        version_ = 0;
        bitField0_ = (bitField0_ & ~0x00000001);
        state_ = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslState.SUCCESS;
        bitField0_ = (bitField0_ & ~0x00000002);
        token_ = com.google.protobuf.ByteString.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000004);
        if (authsBuilder_ == null) {
          auths_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000008);
        } else {
          authsBuilder_.clear();
        }
        return this;
      }
5646
      // Clones by building a partial message and merging it into a fresh
      // builder.
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RpcSaslProto_descriptor;
      }

      public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto getDefaultInstanceForType() {
        return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.getDefaultInstance();
      }
5659
      // Builds and verifies required fields, throwing if 'state' (or a nested
      // SaslAuth's required fields) is unset.
      public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto build() {
        org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      // Copies builder state into a new message without checking required
      // fields. The auths list is frozen (made unmodifiable) on first build
      // so the message can share it safely; the builder's bit 0x8 is dropped
      // so a later mutation re-copies the list.
      public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto buildPartial() {
        org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto result = new org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.version_ = version_;
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.state_ = state_;
        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
          to_bitField0_ |= 0x00000004;
        }
        result.token_ = token_;
        if (authsBuilder_ == null) {
          if (((bitField0_ & 0x00000008) == 0x00000008)) {
            auths_ = java.util.Collections.unmodifiableList(auths_);
            bitField0_ = (bitField0_ & ~0x00000008);
          }
          result.auths_ = auths_;
        } else {
          result.auths_ = authsBuilder_.build();
        }
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
5697
      // Dynamic-dispatch merge: uses the typed overload for RpcSaslProto,
      // otherwise falls back to reflective merging in the superclass.
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto) {
          return mergeFrom((org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      // Typed merge: scalar fields are overwritten when set in 'other';
      // 'auths' entries are appended. When this builder's auths list is still
      // empty it aliases other's (immutable) list instead of copying.
      public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto other) {
        if (other == org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.getDefaultInstance()) return this;
        if (other.hasVersion()) {
          setVersion(other.getVersion());
        }
        if (other.hasState()) {
          setState(other.getState());
        }
        if (other.hasToken()) {
          setToken(other.getToken());
        }
        if (authsBuilder_ == null) {
          if (!other.auths_.isEmpty()) {
            if (auths_.isEmpty()) {
              auths_ = other.auths_;
              bitField0_ = (bitField0_ & ~0x00000008);
            } else {
              ensureAuthsIsMutable();
              auths_.addAll(other.auths_);
            }
            onChanged();
          }
        } else {
          if (!other.auths_.isEmpty()) {
            if (authsBuilder_.isEmpty()) {
              authsBuilder_.dispose();
              authsBuilder_ = null;
              auths_ = other.auths_;
              bitField0_ = (bitField0_ & ~0x00000008);
              authsBuilder_ = 
                com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
                   getAuthsFieldBuilder() : null;
            } else {
              authsBuilder_.addAllMessages(other.auths_);
            }
          }
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
5747
      // Mirrors the message-side check: required 'state' must be set and
      // every nested SaslAuth must itself be initialized.
      public final boolean isInitialized() {
        if (!hasState()) {
          
          return false;
        }
        for (int i = 0; i < getAuthsCount(); i++) {
          if (!getAuths(i).isInitialized()) {
            
            return false;
          }
        }
        return true;
      }
5761
      // Parses a message from the stream and merges it in. On a parse error
      // the partially parsed message (if any) is still merged in the finally
      // block before the exception propagates.
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      // Presence bits for this builder's fields (independent of the message's).
      private int bitField0_;

      // optional uint32 version = 1;
      private int version_ ;
      /**
       * <code>optional uint32 version = 1;</code>
       */
      // True when presence bit 0x1 is set.
      public boolean hasVersion() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>optional uint32 version = 1;</code>
       */
      public int getVersion() {
        return version_;
      }
      /**
       * <code>optional uint32 version = 1;</code>
       */
      // Sets version and marks presence bit 0x1.
      public Builder setVersion(int value) {
        bitField0_ |= 0x00000001;
        version_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional uint32 version = 1;</code>
       */
      // Clears presence bit 0x1 and restores the default (0).
      public Builder clearVersion() {
        bitField0_ = (bitField0_ & ~0x00000001);
        version_ = 0;
        onChanged();
        return this;
      }
5813
      // required .hadoop.common.RpcSaslProto.SaslState state = 2;
      // Defaults to SUCCESS, the first enum value declared in the proto.
      private org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslState state_ = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslState.SUCCESS;
      /**
       * <code>required .hadoop.common.RpcSaslProto.SaslState state = 2;</code>
       */
      // True when presence bit 0x2 is set.
      public boolean hasState() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>required .hadoop.common.RpcSaslProto.SaslState state = 2;</code>
       */
      public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslState getState() {
        return state_;
      }
      /**
       * <code>required .hadoop.common.RpcSaslProto.SaslState state = 2;</code>
       */
      // Sets state and marks presence bit 0x2; rejects null.
      public Builder setState(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslState value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000002;
        state_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>required .hadoop.common.RpcSaslProto.SaslState state = 2;</code>
       */
      // Clears presence bit 0x2 and restores the default (SUCCESS).
      public Builder clearState() {
        bitField0_ = (bitField0_ & ~0x00000002);
        state_ = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslState.SUCCESS;
        onChanged();
        return this;
      }
5849
      // optional bytes token = 3;
      // Raw bytes field; defaults to the empty ByteString.
      private com.google.protobuf.ByteString token_ = com.google.protobuf.ByteString.EMPTY;
      /**
       * <code>optional bytes token = 3;</code>
       */
      // True when presence bit 0x4 is set.
      public boolean hasToken() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      /**
       * <code>optional bytes token = 3;</code>
       */
      public com.google.protobuf.ByteString getToken() {
        return token_;
      }
      /**
       * <code>optional bytes token = 3;</code>
       */
      // Sets token and marks presence bit 0x4; rejects null.
      public Builder setToken(com.google.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000004;
        token_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional bytes token = 3;</code>
       */
      // Clears presence bit 0x4 and restores the default-instance value.
      public Builder clearToken() {
        bitField0_ = (bitField0_ & ~0x00000004);
        token_ = getDefaultInstance().getToken();
        onChanged();
        return this;
      }
5885
      // repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;
      // Plain-list storage; bit 0x8 records whether auths_ is a private
      // mutable copy (set) or may alias a shared/immutable list (clear).
      private java.util.List<org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth> auths_ =
        java.util.Collections.emptyList();
      // Copy-on-write guard: clones auths_ into an ArrayList before the
      // first mutation since the last build/merge.
      private void ensureAuthsIsMutable() {
        if (!((bitField0_ & 0x00000008) == 0x00000008)) {
          auths_ = new java.util.ArrayList<org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth>(auths_);
          bitField0_ |= 0x00000008;
         }
      }

      // Lazily-created nested-builder support; when non-null it, not auths_,
      // is the authoritative storage for the repeated field.
      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth.Builder, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuthOrBuilder> authsBuilder_;
5898
5899      /**
5900       * <code>repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;</code>
5901       */
5902      public java.util.List<org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth> getAuthsList() {
5903        if (authsBuilder_ == null) {
5904          return java.util.Collections.unmodifiableList(auths_);
5905        } else {
5906          return authsBuilder_.getMessageList();
5907        }
5908      }
5909      /**
5910       * <code>repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;</code>
5911       */
5912      public int getAuthsCount() {
5913        if (authsBuilder_ == null) {
5914          return auths_.size();
5915        } else {
5916          return authsBuilder_.getCount();
5917        }
5918      }
5919      /**
5920       * <code>repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;</code>
5921       */
5922      public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth getAuths(int index) {
5923        if (authsBuilder_ == null) {
5924          return auths_.get(index);
5925        } else {
5926          return authsBuilder_.getMessage(index);
5927        }
5928      }
5929      /**
5930       * <code>repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;</code>
5931       */
5932      public Builder setAuths(
5933          int index, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth value) {
5934        if (authsBuilder_ == null) {
5935          if (value == null) {
5936            throw new NullPointerException();
5937          }
5938          ensureAuthsIsMutable();
5939          auths_.set(index, value);
5940          onChanged();
5941        } else {
5942          authsBuilder_.setMessage(index, value);
5943        }
5944        return this;
5945      }
5946      /**
5947       * <code>repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;</code>
5948       */
5949      public Builder setAuths(
5950          int index, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth.Builder builderForValue) {
5951        if (authsBuilder_ == null) {
5952          ensureAuthsIsMutable();
5953          auths_.set(index, builderForValue.build());
5954          onChanged();
5955        } else {
5956          authsBuilder_.setMessage(index, builderForValue.build());
5957        }
5958        return this;
5959      }
5960      /**
5961       * <code>repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;</code>
5962       */
5963      public Builder addAuths(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth value) {
5964        if (authsBuilder_ == null) {
5965          if (value == null) {
5966            throw new NullPointerException();
5967          }
5968          ensureAuthsIsMutable();
5969          auths_.add(value);
5970          onChanged();
5971        } else {
5972          authsBuilder_.addMessage(value);
5973        }
5974        return this;
5975      }
5976      /**
5977       * <code>repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;</code>
5978       */
5979      public Builder addAuths(
5980          int index, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth value) {
5981        if (authsBuilder_ == null) {
5982          if (value == null) {
5983            throw new NullPointerException();
5984          }
5985          ensureAuthsIsMutable();
5986          auths_.add(index, value);
5987          onChanged();
5988        } else {
5989          authsBuilder_.addMessage(index, value);
5990        }
5991        return this;
5992      }
5993      /**
5994       * <code>repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;</code>
5995       */
5996      public Builder addAuths(
5997          org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth.Builder builderForValue) {
5998        if (authsBuilder_ == null) {
5999          ensureAuthsIsMutable();
6000          auths_.add(builderForValue.build());
6001          onChanged();
6002        } else {
6003          authsBuilder_.addMessage(builderForValue.build());
6004        }
6005        return this;
6006      }
6007      /**
6008       * <code>repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;</code>
6009       */
6010      public Builder addAuths(
6011          int index, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth.Builder builderForValue) {
6012        if (authsBuilder_ == null) {
6013          ensureAuthsIsMutable();
6014          auths_.add(index, builderForValue.build());
6015          onChanged();
6016        } else {
6017          authsBuilder_.addMessage(index, builderForValue.build());
6018        }
6019        return this;
6020      }
6021      /**
6022       * <code>repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;</code>
6023       */
6024      public Builder addAllAuths(
6025          java.lang.Iterable<? extends org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth> values) {
6026        if (authsBuilder_ == null) {
6027          ensureAuthsIsMutable();
6028          super.addAll(values, auths_);
6029          onChanged();
6030        } else {
6031          authsBuilder_.addAllMessages(values);
6032        }
6033        return this;
6034      }
6035      /**
6036       * <code>repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;</code>
6037       */
6038      public Builder clearAuths() {
6039        if (authsBuilder_ == null) {
6040          auths_ = java.util.Collections.emptyList();
6041          bitField0_ = (bitField0_ & ~0x00000008);
6042          onChanged();
6043        } else {
6044          authsBuilder_.clear();
6045        }
6046        return this;
6047      }
6048      /**
6049       * <code>repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;</code>
6050       */
6051      public Builder removeAuths(int index) {
6052        if (authsBuilder_ == null) {
6053          ensureAuthsIsMutable();
6054          auths_.remove(index);
6055          onChanged();
6056        } else {
6057          authsBuilder_.remove(index);
6058        }
6059        return this;
6060      }
6061      /**
6062       * <code>repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;</code>
6063       */
6064      public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth.Builder getAuthsBuilder(
6065          int index) {
6066        return getAuthsFieldBuilder().getBuilder(index);
6067      }
6068      /**
6069       * <code>repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;</code>
6070       */
6071      public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuthOrBuilder getAuthsOrBuilder(
6072          int index) {
6073        if (authsBuilder_ == null) {
6074          return auths_.get(index);  } else {
6075          return authsBuilder_.getMessageOrBuilder(index);
6076        }
6077      }
6078      /**
6079       * <code>repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;</code>
6080       */
6081      public java.util.List<? extends org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuthOrBuilder> 
6082           getAuthsOrBuilderList() {
6083        if (authsBuilder_ != null) {
6084          return authsBuilder_.getMessageOrBuilderList();
6085        } else {
6086          return java.util.Collections.unmodifiableList(auths_);
6087        }
6088      }
6089      /**
6090       * <code>repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;</code>
6091       */
6092      public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth.Builder addAuthsBuilder() {
6093        return getAuthsFieldBuilder().addBuilder(
6094            org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth.getDefaultInstance());
6095      }
6096      /**
6097       * <code>repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;</code>
6098       */
6099      public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth.Builder addAuthsBuilder(
6100          int index) {
6101        return getAuthsFieldBuilder().addBuilder(
6102            index, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth.getDefaultInstance());
6103      }
6104      /**
6105       * <code>repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;</code>
6106       */
6107      public java.util.List<org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth.Builder> 
6108           getAuthsBuilderList() {
6109        return getAuthsFieldBuilder().getBuilderList();
6110      }
      // Lazily creates the RepeatedFieldBuilder for auths, seeding it with the
      // current list and its mutability bit; once created, auths_ is nulled
      // out and the nested builder becomes the single source of truth.
      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth.Builder, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuthOrBuilder> 
          getAuthsFieldBuilder() {
        if (authsBuilder_ == null) {
          authsBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
              org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth.Builder, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuthOrBuilder>(
                  auths_,
                  ((bitField0_ & 0x00000008) == 0x00000008),
                  getParentForChildren(),
                  isClean());
          auths_ = null;
        }
        return authsBuilder_;
      }
6125
6126      // @@protoc_insertion_point(builder_scope:hadoop.common.RpcSaslProto)
6127    }
6128
    // Class initializer: builds the singleton default instance of
    // RpcSaslProto and populates its fields with their default values.
    static {
      defaultInstance = new RpcSaslProto(true);
      defaultInstance.initFields();
    }
6133
6134    // @@protoc_insertion_point(class_scope:hadoop.common.RpcSaslProto)
6135  }
6136
  // Per-message descriptor and reflective field-accessor tables; all are
  // assigned once by the InternalDescriptorAssigner in the static initializer
  // at the bottom of this file.
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_common_RPCTraceInfoProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_common_RPCTraceInfoProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_common_RpcRequestHeaderProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_common_RpcRequestHeaderProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_common_RpcResponseHeaderProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_common_RpcResponseHeaderProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_common_RpcSaslProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_common_RpcSaslProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_common_RpcSaslProto_SaslAuth_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_common_RpcSaslProto_SaslAuth_fieldAccessorTable;
6162
  /**
   * Returns the file descriptor for RpcHeader.proto, built once by the
   * static initializer below.
   */
  public static com.google.protobuf.Descriptors.FileDescriptor
      getDescriptor() {
    return descriptor;
  }
  // File-level descriptor; assigned exactly once in the static initializer.
  private static com.google.protobuf.Descriptors.FileDescriptor
      descriptor;
  static {
    // Serialized FileDescriptorProto of RpcHeader.proto, emitted by protoc as
    // octal-escaped string chunks. These bytes must remain exactly as
    // generated — any change corrupts the descriptor.
    java.lang.String[] descriptorData = {
      "\n\017RpcHeader.proto\022\rhadoop.common\"6\n\021RPCT" +
      "raceInfoProto\022\017\n\007traceId\030\001 \001(\003\022\020\n\010parent" +
      "Id\030\002 \001(\003\"\327\002\n\025RpcRequestHeaderProto\022,\n\007rp" +
      "cKind\030\001 \001(\0162\033.hadoop.common.RpcKindProto" +
      "\022B\n\005rpcOp\030\002 \001(\01623.hadoop.common.RpcReque" +
      "stHeaderProto.OperationProto\022\016\n\006callId\030\003" +
      " \002(\021\022\020\n\010clientId\030\004 \002(\014\022\026\n\nretryCount\030\005 \001" +
      "(\021:\002-1\0223\n\ttraceInfo\030\006 \001(\0132 .hadoop.commo" +
      "n.RPCTraceInfoProto\"]\n\016OperationProto\022\024\n" +
      "\020RPC_FINAL_PACKET\020\000\022\033\n\027RPC_CONTINUATION_",
      "PACKET\020\001\022\030\n\024RPC_CLOSE_CONNECTION\020\002\"\312\005\n\026R" +
      "pcResponseHeaderProto\022\016\n\006callId\030\001 \002(\r\022D\n" +
      "\006status\030\002 \002(\01624.hadoop.common.RpcRespons" +
      "eHeaderProto.RpcStatusProto\022\033\n\023serverIpc" +
      "VersionNum\030\003 \001(\r\022\032\n\022exceptionClassName\030\004" +
      " \001(\t\022\020\n\010errorMsg\030\005 \001(\t\022L\n\013errorDetail\030\006 " +
      "\001(\01627.hadoop.common.RpcResponseHeaderPro" +
      "to.RpcErrorCodeProto\022\020\n\010clientId\030\007 \001(\014\022\026" +
      "\n\nretryCount\030\010 \001(\021:\002-1\"3\n\016RpcStatusProto" +
      "\022\013\n\007SUCCESS\020\000\022\t\n\005ERROR\020\001\022\t\n\005FATAL\020\002\"\341\002\n\021",
      "RpcErrorCodeProto\022\025\n\021ERROR_APPLICATION\020\001" +
      "\022\030\n\024ERROR_NO_SUCH_METHOD\020\002\022\032\n\026ERROR_NO_S" +
      "UCH_PROTOCOL\020\003\022\024\n\020ERROR_RPC_SERVER\020\004\022\036\n\032" +
      "ERROR_SERIALIZING_RESPONSE\020\005\022\036\n\032ERROR_RP" +
      "C_VERSION_MISMATCH\020\006\022\021\n\rFATAL_UNKNOWN\020\n\022" +
      "#\n\037FATAL_UNSUPPORTED_SERIALIZATION\020\013\022\034\n\030" +
      "FATAL_INVALID_RPC_HEADER\020\014\022\037\n\033FATAL_DESE" +
      "RIALIZING_REQUEST\020\r\022\032\n\026FATAL_VERSION_MIS" +
      "MATCH\020\016\022\026\n\022FATAL_UNAUTHORIZED\020\017\"\335\002\n\014RpcS" +
      "aslProto\022\017\n\007version\030\001 \001(\r\0224\n\005state\030\002 \002(\016",
      "2%.hadoop.common.RpcSaslProto.SaslState\022" +
      "\r\n\005token\030\003 \001(\014\0223\n\005auths\030\004 \003(\0132$.hadoop.c" +
      "ommon.RpcSaslProto.SaslAuth\032d\n\010SaslAuth\022" +
      "\016\n\006method\030\001 \002(\t\022\021\n\tmechanism\030\002 \002(\t\022\020\n\010pr" +
      "otocol\030\003 \001(\t\022\020\n\010serverId\030\004 \001(\t\022\021\n\tchalle" +
      "nge\030\005 \001(\014\"\\\n\tSaslState\022\013\n\007SUCCESS\020\000\022\r\n\tN" +
      "EGOTIATE\020\001\022\014\n\010INITIATE\020\002\022\r\n\tCHALLENGE\020\003\022" +
      "\014\n\010RESPONSE\020\004\022\010\n\004WRAP\020\005*J\n\014RpcKindProto\022" +
      "\017\n\013RPC_BUILTIN\020\000\022\020\n\014RPC_WRITABLE\020\001\022\027\n\023RP" +
      "C_PROTOCOL_BUFFER\020\002B4\n\036org.apache.hadoop",
      ".ipc.protobufB\017RpcHeaderProtos\240\001\001"
    };
    // Callback invoked once the file descriptor is built: stores it in
    // `descriptor` and wires up each message's descriptor and reflective
    // field-accessor table. The String[] arguments are the camel-cased field
    // names used for reflection.
    com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
      new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
        public com.google.protobuf.ExtensionRegistry assignDescriptors(
            com.google.protobuf.Descriptors.FileDescriptor root) {
          descriptor = root;
          internal_static_hadoop_common_RPCTraceInfoProto_descriptor =
            getDescriptor().getMessageTypes().get(0);
          internal_static_hadoop_common_RPCTraceInfoProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_common_RPCTraceInfoProto_descriptor,
              new java.lang.String[] { "TraceId", "ParentId", });
          internal_static_hadoop_common_RpcRequestHeaderProto_descriptor =
            getDescriptor().getMessageTypes().get(1);
          internal_static_hadoop_common_RpcRequestHeaderProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_common_RpcRequestHeaderProto_descriptor,
              new java.lang.String[] { "RpcKind", "RpcOp", "CallId", "ClientId", "RetryCount", "TraceInfo", });
          internal_static_hadoop_common_RpcResponseHeaderProto_descriptor =
            getDescriptor().getMessageTypes().get(2);
          internal_static_hadoop_common_RpcResponseHeaderProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_common_RpcResponseHeaderProto_descriptor,
              new java.lang.String[] { "CallId", "Status", "ServerIpcVersionNum", "ExceptionClassName", "ErrorMsg", "ErrorDetail", "ClientId", "RetryCount", });
          internal_static_hadoop_common_RpcSaslProto_descriptor =
            getDescriptor().getMessageTypes().get(3);
          internal_static_hadoop_common_RpcSaslProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_common_RpcSaslProto_descriptor,
              new java.lang.String[] { "Version", "State", "Token", "Auths", });
          internal_static_hadoop_common_RpcSaslProto_SaslAuth_descriptor =
            internal_static_hadoop_common_RpcSaslProto_descriptor.getNestedTypes().get(0);
          internal_static_hadoop_common_RpcSaslProto_SaslAuth_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_common_RpcSaslProto_SaslAuth_descriptor,
              new java.lang.String[] { "Method", "Mechanism", "Protocol", "ServerId", "Challenge", });
          // No extensions registered for this file.
          return null;
        }
      };
    // Parse descriptorData (no dependencies) and run the assigner above.
    com.google.protobuf.Descriptors.FileDescriptor
      .internalBuildGeneratedFileFrom(descriptorData,
        new com.google.protobuf.Descriptors.FileDescriptor[] {
        }, assigner);
  }
6256
6257  // @@protoc_insertion_point(outer_class_scope)
6258}