001// Generated by the protocol buffer compiler.  DO NOT EDIT!
002// source: test.proto
003
004package org.apache.hadoop.ipc.protobuf;
005
006public final class TestProtos {
  // Private constructor: this class is a static holder for generated message
  // types and is never instantiated.
  private TestProtos() {}
  // No extensions are defined in test.proto, so there is nothing to register.
  public static void registerAllExtensions(
      com.google.protobuf.ExtensionRegistry registry) {
  }
  // Accessor interface for EmptyRequestProto; empty because the message
  // declares no fields.
  public interface EmptyRequestProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
  }
014  /**
015   * Protobuf type {@code hadoop.common.EmptyRequestProto}
016   */
017  public static final class EmptyRequestProto extends
018      com.google.protobuf.GeneratedMessage
019      implements EmptyRequestProtoOrBuilder {
020    // Use EmptyRequestProto.newBuilder() to construct.
021    private EmptyRequestProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
022      super(builder);
023      this.unknownFields = builder.getUnknownFields();
024    }
025    private EmptyRequestProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
026
027    private static final EmptyRequestProto defaultInstance;
028    public static EmptyRequestProto getDefaultInstance() {
029      return defaultInstance;
030    }
031
032    public EmptyRequestProto getDefaultInstanceForType() {
033      return defaultInstance;
034    }
035
036    private final com.google.protobuf.UnknownFieldSet unknownFields;
037    @java.lang.Override
038    public final com.google.protobuf.UnknownFieldSet
039        getUnknownFields() {
040      return this.unknownFields;
041    }
042    private EmptyRequestProto(
043        com.google.protobuf.CodedInputStream input,
044        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
045        throws com.google.protobuf.InvalidProtocolBufferException {
046      initFields();
047      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
048          com.google.protobuf.UnknownFieldSet.newBuilder();
049      try {
050        boolean done = false;
051        while (!done) {
052          int tag = input.readTag();
053          switch (tag) {
054            case 0:
055              done = true;
056              break;
057            default: {
058              if (!parseUnknownField(input, unknownFields,
059                                     extensionRegistry, tag)) {
060                done = true;
061              }
062              break;
063            }
064          }
065        }
066      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
067        throw e.setUnfinishedMessage(this);
068      } catch (java.io.IOException e) {
069        throw new com.google.protobuf.InvalidProtocolBufferException(
070            e.getMessage()).setUnfinishedMessage(this);
071      } finally {
072        this.unknownFields = unknownFields.build();
073        makeExtensionsImmutable();
074      }
075    }
076    public static final com.google.protobuf.Descriptors.Descriptor
077        getDescriptor() {
078      return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EmptyRequestProto_descriptor;
079    }
080
081    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
082        internalGetFieldAccessorTable() {
083      return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EmptyRequestProto_fieldAccessorTable
084          .ensureFieldAccessorsInitialized(
085              org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.class, org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.Builder.class);
086    }
087
088    public static com.google.protobuf.Parser<EmptyRequestProto> PARSER =
089        new com.google.protobuf.AbstractParser<EmptyRequestProto>() {
090      public EmptyRequestProto parsePartialFrom(
091          com.google.protobuf.CodedInputStream input,
092          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
093          throws com.google.protobuf.InvalidProtocolBufferException {
094        return new EmptyRequestProto(input, extensionRegistry);
095      }
096    };
097
098    @java.lang.Override
099    public com.google.protobuf.Parser<EmptyRequestProto> getParserForType() {
100      return PARSER;
101    }
102
103    private void initFields() {
104    }
105    private byte memoizedIsInitialized = -1;
106    public final boolean isInitialized() {
107      byte isInitialized = memoizedIsInitialized;
108      if (isInitialized != -1) return isInitialized == 1;
109
110      memoizedIsInitialized = 1;
111      return true;
112    }
113
114    public void writeTo(com.google.protobuf.CodedOutputStream output)
115                        throws java.io.IOException {
116      getSerializedSize();
117      getUnknownFields().writeTo(output);
118    }
119
120    private int memoizedSerializedSize = -1;
121    public int getSerializedSize() {
122      int size = memoizedSerializedSize;
123      if (size != -1) return size;
124
125      size = 0;
126      size += getUnknownFields().getSerializedSize();
127      memoizedSerializedSize = size;
128      return size;
129    }
130
131    private static final long serialVersionUID = 0L;
132    @java.lang.Override
133    protected java.lang.Object writeReplace()
134        throws java.io.ObjectStreamException {
135      return super.writeReplace();
136    }
137
138    @java.lang.Override
139    public boolean equals(final java.lang.Object obj) {
140      if (obj == this) {
141       return true;
142      }
143      if (!(obj instanceof org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto)) {
144        return super.equals(obj);
145      }
146      org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto other = (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto) obj;
147
148      boolean result = true;
149      result = result &&
150          getUnknownFields().equals(other.getUnknownFields());
151      return result;
152    }
153
154    private int memoizedHashCode = 0;
155    @java.lang.Override
156    public int hashCode() {
157      if (memoizedHashCode != 0) {
158        return memoizedHashCode;
159      }
160      int hash = 41;
161      hash = (19 * hash) + getDescriptorForType().hashCode();
162      hash = (29 * hash) + getUnknownFields().hashCode();
163      memoizedHashCode = hash;
164      return hash;
165    }
166
167    public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto parseFrom(
168        com.google.protobuf.ByteString data)
169        throws com.google.protobuf.InvalidProtocolBufferException {
170      return PARSER.parseFrom(data);
171    }
172    public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto parseFrom(
173        com.google.protobuf.ByteString data,
174        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
175        throws com.google.protobuf.InvalidProtocolBufferException {
176      return PARSER.parseFrom(data, extensionRegistry);
177    }
178    public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto parseFrom(byte[] data)
179        throws com.google.protobuf.InvalidProtocolBufferException {
180      return PARSER.parseFrom(data);
181    }
182    public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto parseFrom(
183        byte[] data,
184        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
185        throws com.google.protobuf.InvalidProtocolBufferException {
186      return PARSER.parseFrom(data, extensionRegistry);
187    }
188    public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto parseFrom(java.io.InputStream input)
189        throws java.io.IOException {
190      return PARSER.parseFrom(input);
191    }
192    public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto parseFrom(
193        java.io.InputStream input,
194        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
195        throws java.io.IOException {
196      return PARSER.parseFrom(input, extensionRegistry);
197    }
198    public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto parseDelimitedFrom(java.io.InputStream input)
199        throws java.io.IOException {
200      return PARSER.parseDelimitedFrom(input);
201    }
202    public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto parseDelimitedFrom(
203        java.io.InputStream input,
204        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
205        throws java.io.IOException {
206      return PARSER.parseDelimitedFrom(input, extensionRegistry);
207    }
208    public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto parseFrom(
209        com.google.protobuf.CodedInputStream input)
210        throws java.io.IOException {
211      return PARSER.parseFrom(input);
212    }
213    public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto parseFrom(
214        com.google.protobuf.CodedInputStream input,
215        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
216        throws java.io.IOException {
217      return PARSER.parseFrom(input, extensionRegistry);
218    }
219
220    public static Builder newBuilder() { return Builder.create(); }
221    public Builder newBuilderForType() { return newBuilder(); }
222    public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto prototype) {
223      return newBuilder().mergeFrom(prototype);
224    }
225    public Builder toBuilder() { return newBuilder(this); }
226
227    @java.lang.Override
228    protected Builder newBuilderForType(
229        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
230      Builder builder = new Builder(parent);
231      return builder;
232    }
233    /**
234     * Protobuf type {@code hadoop.common.EmptyRequestProto}
235     */
236    public static final class Builder extends
237        com.google.protobuf.GeneratedMessage.Builder<Builder>
238       implements org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProtoOrBuilder {
239      public static final com.google.protobuf.Descriptors.Descriptor
240          getDescriptor() {
241        return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EmptyRequestProto_descriptor;
242      }
243
244      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
245          internalGetFieldAccessorTable() {
246        return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EmptyRequestProto_fieldAccessorTable
247            .ensureFieldAccessorsInitialized(
248                org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.class, org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.Builder.class);
249      }
250
251      // Construct using org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.newBuilder()
252      private Builder() {
253        maybeForceBuilderInitialization();
254      }
255
256      private Builder(
257          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
258        super(parent);
259        maybeForceBuilderInitialization();
260      }
261      private void maybeForceBuilderInitialization() {
262        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
263        }
264      }
265      private static Builder create() {
266        return new Builder();
267      }
268
269      public Builder clear() {
270        super.clear();
271        return this;
272      }
273
274      public Builder clone() {
275        return create().mergeFrom(buildPartial());
276      }
277
278      public com.google.protobuf.Descriptors.Descriptor
279          getDescriptorForType() {
280        return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EmptyRequestProto_descriptor;
281      }
282
283      public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto getDefaultInstanceForType() {
284        return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.getDefaultInstance();
285      }
286
287      public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto build() {
288        org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto result = buildPartial();
289        if (!result.isInitialized()) {
290          throw newUninitializedMessageException(result);
291        }
292        return result;
293      }
294
295      public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto buildPartial() {
296        org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto result = new org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto(this);
297        onBuilt();
298        return result;
299      }
300
301      public Builder mergeFrom(com.google.protobuf.Message other) {
302        if (other instanceof org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto) {
303          return mergeFrom((org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto)other);
304        } else {
305          super.mergeFrom(other);
306          return this;
307        }
308      }
309
310      public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto other) {
311        if (other == org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.getDefaultInstance()) return this;
312        this.mergeUnknownFields(other.getUnknownFields());
313        return this;
314      }
315
316      public final boolean isInitialized() {
317        return true;
318      }
319
320      public Builder mergeFrom(
321          com.google.protobuf.CodedInputStream input,
322          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
323          throws java.io.IOException {
324        org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto parsedMessage = null;
325        try {
326          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
327        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
328          parsedMessage = (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto) e.getUnfinishedMessage();
329          throw e;
330        } finally {
331          if (parsedMessage != null) {
332            mergeFrom(parsedMessage);
333          }
334        }
335        return this;
336      }
337
338      // @@protoc_insertion_point(builder_scope:hadoop.common.EmptyRequestProto)
339    }
340
341    static {
342      defaultInstance = new EmptyRequestProto(true);
343      defaultInstance.initFields();
344    }
345
346    // @@protoc_insertion_point(class_scope:hadoop.common.EmptyRequestProto)
347  }
348
  // Accessor interface for EmptyResponseProto; empty because the message
  // declares no fields.
  public interface EmptyResponseProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
  }
352  /**
353   * Protobuf type {@code hadoop.common.EmptyResponseProto}
354   */
355  public static final class EmptyResponseProto extends
356      com.google.protobuf.GeneratedMessage
357      implements EmptyResponseProtoOrBuilder {
358    // Use EmptyResponseProto.newBuilder() to construct.
359    private EmptyResponseProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
360      super(builder);
361      this.unknownFields = builder.getUnknownFields();
362    }
363    private EmptyResponseProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
364
365    private static final EmptyResponseProto defaultInstance;
366    public static EmptyResponseProto getDefaultInstance() {
367      return defaultInstance;
368    }
369
370    public EmptyResponseProto getDefaultInstanceForType() {
371      return defaultInstance;
372    }
373
374    private final com.google.protobuf.UnknownFieldSet unknownFields;
375    @java.lang.Override
376    public final com.google.protobuf.UnknownFieldSet
377        getUnknownFields() {
378      return this.unknownFields;
379    }
380    private EmptyResponseProto(
381        com.google.protobuf.CodedInputStream input,
382        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
383        throws com.google.protobuf.InvalidProtocolBufferException {
384      initFields();
385      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
386          com.google.protobuf.UnknownFieldSet.newBuilder();
387      try {
388        boolean done = false;
389        while (!done) {
390          int tag = input.readTag();
391          switch (tag) {
392            case 0:
393              done = true;
394              break;
395            default: {
396              if (!parseUnknownField(input, unknownFields,
397                                     extensionRegistry, tag)) {
398                done = true;
399              }
400              break;
401            }
402          }
403        }
404      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
405        throw e.setUnfinishedMessage(this);
406      } catch (java.io.IOException e) {
407        throw new com.google.protobuf.InvalidProtocolBufferException(
408            e.getMessage()).setUnfinishedMessage(this);
409      } finally {
410        this.unknownFields = unknownFields.build();
411        makeExtensionsImmutable();
412      }
413    }
414    public static final com.google.protobuf.Descriptors.Descriptor
415        getDescriptor() {
416      return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EmptyResponseProto_descriptor;
417    }
418
419    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
420        internalGetFieldAccessorTable() {
421      return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EmptyResponseProto_fieldAccessorTable
422          .ensureFieldAccessorsInitialized(
423              org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.class, org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.Builder.class);
424    }
425
426    public static com.google.protobuf.Parser<EmptyResponseProto> PARSER =
427        new com.google.protobuf.AbstractParser<EmptyResponseProto>() {
428      public EmptyResponseProto parsePartialFrom(
429          com.google.protobuf.CodedInputStream input,
430          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
431          throws com.google.protobuf.InvalidProtocolBufferException {
432        return new EmptyResponseProto(input, extensionRegistry);
433      }
434    };
435
436    @java.lang.Override
437    public com.google.protobuf.Parser<EmptyResponseProto> getParserForType() {
438      return PARSER;
439    }
440
441    private void initFields() {
442    }
443    private byte memoizedIsInitialized = -1;
444    public final boolean isInitialized() {
445      byte isInitialized = memoizedIsInitialized;
446      if (isInitialized != -1) return isInitialized == 1;
447
448      memoizedIsInitialized = 1;
449      return true;
450    }
451
452    public void writeTo(com.google.protobuf.CodedOutputStream output)
453                        throws java.io.IOException {
454      getSerializedSize();
455      getUnknownFields().writeTo(output);
456    }
457
458    private int memoizedSerializedSize = -1;
459    public int getSerializedSize() {
460      int size = memoizedSerializedSize;
461      if (size != -1) return size;
462
463      size = 0;
464      size += getUnknownFields().getSerializedSize();
465      memoizedSerializedSize = size;
466      return size;
467    }
468
469    private static final long serialVersionUID = 0L;
470    @java.lang.Override
471    protected java.lang.Object writeReplace()
472        throws java.io.ObjectStreamException {
473      return super.writeReplace();
474    }
475
476    @java.lang.Override
477    public boolean equals(final java.lang.Object obj) {
478      if (obj == this) {
479       return true;
480      }
481      if (!(obj instanceof org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto)) {
482        return super.equals(obj);
483      }
484      org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto other = (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto) obj;
485
486      boolean result = true;
487      result = result &&
488          getUnknownFields().equals(other.getUnknownFields());
489      return result;
490    }
491
492    private int memoizedHashCode = 0;
493    @java.lang.Override
494    public int hashCode() {
495      if (memoizedHashCode != 0) {
496        return memoizedHashCode;
497      }
498      int hash = 41;
499      hash = (19 * hash) + getDescriptorForType().hashCode();
500      hash = (29 * hash) + getUnknownFields().hashCode();
501      memoizedHashCode = hash;
502      return hash;
503    }
504
505    public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto parseFrom(
506        com.google.protobuf.ByteString data)
507        throws com.google.protobuf.InvalidProtocolBufferException {
508      return PARSER.parseFrom(data);
509    }
510    public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto parseFrom(
511        com.google.protobuf.ByteString data,
512        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
513        throws com.google.protobuf.InvalidProtocolBufferException {
514      return PARSER.parseFrom(data, extensionRegistry);
515    }
516    public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto parseFrom(byte[] data)
517        throws com.google.protobuf.InvalidProtocolBufferException {
518      return PARSER.parseFrom(data);
519    }
520    public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto parseFrom(
521        byte[] data,
522        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
523        throws com.google.protobuf.InvalidProtocolBufferException {
524      return PARSER.parseFrom(data, extensionRegistry);
525    }
526    public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto parseFrom(java.io.InputStream input)
527        throws java.io.IOException {
528      return PARSER.parseFrom(input);
529    }
530    public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto parseFrom(
531        java.io.InputStream input,
532        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
533        throws java.io.IOException {
534      return PARSER.parseFrom(input, extensionRegistry);
535    }
536    public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto parseDelimitedFrom(java.io.InputStream input)
537        throws java.io.IOException {
538      return PARSER.parseDelimitedFrom(input);
539    }
540    public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto parseDelimitedFrom(
541        java.io.InputStream input,
542        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
543        throws java.io.IOException {
544      return PARSER.parseDelimitedFrom(input, extensionRegistry);
545    }
546    public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto parseFrom(
547        com.google.protobuf.CodedInputStream input)
548        throws java.io.IOException {
549      return PARSER.parseFrom(input);
550    }
551    public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto parseFrom(
552        com.google.protobuf.CodedInputStream input,
553        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
554        throws java.io.IOException {
555      return PARSER.parseFrom(input, extensionRegistry);
556    }
557
558    public static Builder newBuilder() { return Builder.create(); }
559    public Builder newBuilderForType() { return newBuilder(); }
560    public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto prototype) {
561      return newBuilder().mergeFrom(prototype);
562    }
563    public Builder toBuilder() { return newBuilder(this); }
564
565    @java.lang.Override
566    protected Builder newBuilderForType(
567        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
568      Builder builder = new Builder(parent);
569      return builder;
570    }
571    /**
572     * Protobuf type {@code hadoop.common.EmptyResponseProto}
573     */
574    public static final class Builder extends
575        com.google.protobuf.GeneratedMessage.Builder<Builder>
576       implements org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProtoOrBuilder {
577      public static final com.google.protobuf.Descriptors.Descriptor
578          getDescriptor() {
579        return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EmptyResponseProto_descriptor;
580      }
581
582      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
583          internalGetFieldAccessorTable() {
584        return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EmptyResponseProto_fieldAccessorTable
585            .ensureFieldAccessorsInitialized(
586                org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.class, org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.Builder.class);
587      }
588
589      // Construct using org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.newBuilder()
590      private Builder() {
591        maybeForceBuilderInitialization();
592      }
593
594      private Builder(
595          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
596        super(parent);
597        maybeForceBuilderInitialization();
598      }
599      private void maybeForceBuilderInitialization() {
600        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
601        }
602      }
603      private static Builder create() {
604        return new Builder();
605      }
606
607      public Builder clear() {
608        super.clear();
609        return this;
610      }
611
612      public Builder clone() {
613        return create().mergeFrom(buildPartial());
614      }
615
616      public com.google.protobuf.Descriptors.Descriptor
617          getDescriptorForType() {
618        return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EmptyResponseProto_descriptor;
619      }
620
621      public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto getDefaultInstanceForType() {
622        return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance();
623      }
624
625      public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto build() {
626        org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto result = buildPartial();
627        if (!result.isInitialized()) {
628          throw newUninitializedMessageException(result);
629        }
630        return result;
631      }
632
633      public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto buildPartial() {
634        org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto result = new org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto(this);
635        onBuilt();
636        return result;
637      }
638
639      public Builder mergeFrom(com.google.protobuf.Message other) {
640        if (other instanceof org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto) {
641          return mergeFrom((org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto)other);
642        } else {
643          super.mergeFrom(other);
644          return this;
645        }
646      }
647
648      public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto other) {
649        if (other == org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance()) return this;
650        this.mergeUnknownFields(other.getUnknownFields());
651        return this;
652      }
653
654      public final boolean isInitialized() {
655        return true;
656      }
657
658      public Builder mergeFrom(
659          com.google.protobuf.CodedInputStream input,
660          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
661          throws java.io.IOException {
662        org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto parsedMessage = null;
663        try {
664          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
665        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
666          parsedMessage = (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto) e.getUnfinishedMessage();
667          throw e;
668        } finally {
669          if (parsedMessage != null) {
670            mergeFrom(parsedMessage);
671          }
672        }
673        return this;
674      }
675
676      // @@protoc_insertion_point(builder_scope:hadoop.common.EmptyResponseProto)
677    }
678
679    static {
680      defaultInstance = new EmptyResponseProto(true);
681      defaultInstance.initFields();
682    }
683
684    // @@protoc_insertion_point(class_scope:hadoop.common.EmptyResponseProto)
685  }
686
  // Accessor interface for EchoRequestProto's single required string field
  // ("message", field number 1).
  public interface EchoRequestProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required string message = 1;
    /**
     * <code>required string message = 1;</code>
     */
    boolean hasMessage();
    /**
     * <code>required string message = 1;</code>
     */
    java.lang.String getMessage();
    /**
     * <code>required string message = 1;</code>
     */
    com.google.protobuf.ByteString
        getMessageBytes();
  }
705  /**
706   * Protobuf type {@code hadoop.common.EchoRequestProto}
707   */
708  public static final class EchoRequestProto extends
709      com.google.protobuf.GeneratedMessage
710      implements EchoRequestProtoOrBuilder {
711    // Use EchoRequestProto.newBuilder() to construct.
712    private EchoRequestProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
713      super(builder);
714      this.unknownFields = builder.getUnknownFields();
715    }
716    private EchoRequestProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
717
718    private static final EchoRequestProto defaultInstance;
719    public static EchoRequestProto getDefaultInstance() {
720      return defaultInstance;
721    }
722
723    public EchoRequestProto getDefaultInstanceForType() {
724      return defaultInstance;
725    }
726
727    private final com.google.protobuf.UnknownFieldSet unknownFields;
728    @java.lang.Override
729    public final com.google.protobuf.UnknownFieldSet
730        getUnknownFields() {
731      return this.unknownFields;
732    }
733    private EchoRequestProto(
734        com.google.protobuf.CodedInputStream input,
735        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
736        throws com.google.protobuf.InvalidProtocolBufferException {
737      initFields();
738      int mutable_bitField0_ = 0;
739      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
740          com.google.protobuf.UnknownFieldSet.newBuilder();
741      try {
742        boolean done = false;
743        while (!done) {
744          int tag = input.readTag();
745          switch (tag) {
746            case 0:
747              done = true;
748              break;
749            default: {
750              if (!parseUnknownField(input, unknownFields,
751                                     extensionRegistry, tag)) {
752                done = true;
753              }
754              break;
755            }
756            case 10: {
757              bitField0_ |= 0x00000001;
758              message_ = input.readBytes();
759              break;
760            }
761          }
762        }
763      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
764        throw e.setUnfinishedMessage(this);
765      } catch (java.io.IOException e) {
766        throw new com.google.protobuf.InvalidProtocolBufferException(
767            e.getMessage()).setUnfinishedMessage(this);
768      } finally {
769        this.unknownFields = unknownFields.build();
770        makeExtensionsImmutable();
771      }
772    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EchoRequestProto_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EchoRequestProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto.class, org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto.Builder.class);
    }

    // Parser used by all static parseFrom(...) overloads below; delegates to
    // the stream-parsing constructor.
    public static com.google.protobuf.Parser<EchoRequestProto> PARSER =
        new com.google.protobuf.AbstractParser<EchoRequestProto>() {
      public EchoRequestProto parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new EchoRequestProto(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<EchoRequestProto> getParserForType() {
      return PARSER;
    }

    // Presence bits: bit 0 tracks whether "message" was set.
    private int bitField0_;
    // required string message = 1;
    public static final int MESSAGE_FIELD_NUMBER = 1;
    // Holds either a String or a ByteString; converted lazily by the accessors.
    private java.lang.Object message_;
    /**
     * <code>required string message = 1;</code>
     */
    public boolean hasMessage() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
810    /**
811     * <code>required string message = 1;</code>
812     */
813    public java.lang.String getMessage() {
814      java.lang.Object ref = message_;
815      if (ref instanceof java.lang.String) {
816        return (java.lang.String) ref;
817      } else {
818        com.google.protobuf.ByteString bs = 
819            (com.google.protobuf.ByteString) ref;
820        java.lang.String s = bs.toStringUtf8();
821        if (bs.isValidUtf8()) {
822          message_ = s;
823        }
824        return s;
825      }
826    }
827    /**
828     * <code>required string message = 1;</code>
829     */
830    public com.google.protobuf.ByteString
831        getMessageBytes() {
832      java.lang.Object ref = message_;
833      if (ref instanceof java.lang.String) {
834        com.google.protobuf.ByteString b = 
835            com.google.protobuf.ByteString.copyFromUtf8(
836                (java.lang.String) ref);
837        message_ = b;
838        return b;
839      } else {
840        return (com.google.protobuf.ByteString) ref;
841      }
842    }
843
    private void initFields() {
      message_ = "";
    }
    // Memoized result of isInitialized(): -1 unknown, 0 false, 1 true.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      // The required "message" field must be present.
      if (!hasMessage()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }

    /** Serializes the set fields, then any unknown fields, to {@code output}. */
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeBytes(1, getMessageBytes());
      }
      getUnknownFields().writeTo(output);
    }

    // Memoized wire size: -1 means "not computed yet".
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(1, getMessageBytes());
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
890
891    @java.lang.Override
892    public boolean equals(final java.lang.Object obj) {
893      if (obj == this) {
894       return true;
895      }
896      if (!(obj instanceof org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto)) {
897        return super.equals(obj);
898      }
899      org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto other = (org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto) obj;
900
901      boolean result = true;
902      result = result && (hasMessage() == other.hasMessage());
903      if (hasMessage()) {
904        result = result && getMessage()
905            .equals(other.getMessage());
906      }
907      result = result &&
908          getUnknownFields().equals(other.getUnknownFields());
909      return result;
910    }
911
912    private int memoizedHashCode = 0;
913    @java.lang.Override
914    public int hashCode() {
915      if (memoizedHashCode != 0) {
916        return memoizedHashCode;
917      }
918      int hash = 41;
919      hash = (19 * hash) + getDescriptorForType().hashCode();
920      if (hasMessage()) {
921        hash = (37 * hash) + MESSAGE_FIELD_NUMBER;
922        hash = (53 * hash) + getMessage().hashCode();
923      }
924      hash = (29 * hash) + getUnknownFields().hashCode();
925      memoizedHashCode = hash;
926      return hash;
927    }
928
    // -----------------------------------------------------------------------
    // Static parse helpers.  Every overload delegates to PARSER.
    // -----------------------------------------------------------------------
    public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    // Builder factory methods.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
995    /**
996     * Protobuf type {@code hadoop.common.EchoRequestProto}
997     */
998    public static final class Builder extends
999        com.google.protobuf.GeneratedMessage.Builder<Builder>
1000       implements org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProtoOrBuilder {
1001      public static final com.google.protobuf.Descriptors.Descriptor
1002          getDescriptor() {
1003        return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EchoRequestProto_descriptor;
1004      }
1005
1006      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
1007          internalGetFieldAccessorTable() {
1008        return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EchoRequestProto_fieldAccessorTable
1009            .ensureFieldAccessorsInitialized(
1010                org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto.class, org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto.Builder.class);
1011      }
1012
1013      // Construct using org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto.newBuilder()
1014      private Builder() {
1015        maybeForceBuilderInitialization();
1016      }
1017
1018      private Builder(
1019          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
1020        super(parent);
1021        maybeForceBuilderInitialization();
1022      }
1023      private void maybeForceBuilderInitialization() {
1024        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
1025        }
1026      }
1027      private static Builder create() {
1028        return new Builder();
1029      }
1030
1031      public Builder clear() {
1032        super.clear();
1033        message_ = "";
1034        bitField0_ = (bitField0_ & ~0x00000001);
1035        return this;
1036      }
1037
1038      public Builder clone() {
1039        return create().mergeFrom(buildPartial());
1040      }
1041
1042      public com.google.protobuf.Descriptors.Descriptor
1043          getDescriptorForType() {
1044        return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EchoRequestProto_descriptor;
1045      }
1046
1047      public org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto getDefaultInstanceForType() {
1048        return org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto.getDefaultInstance();
1049      }
1050
1051      public org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto build() {
1052        org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto result = buildPartial();
1053        if (!result.isInitialized()) {
1054          throw newUninitializedMessageException(result);
1055        }
1056        return result;
1057      }
1058
1059      public org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto buildPartial() {
1060        org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto result = new org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto(this);
1061        int from_bitField0_ = bitField0_;
1062        int to_bitField0_ = 0;
1063        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
1064          to_bitField0_ |= 0x00000001;
1065        }
1066        result.message_ = message_;
1067        result.bitField0_ = to_bitField0_;
1068        onBuilt();
1069        return result;
1070      }
1071
1072      public Builder mergeFrom(com.google.protobuf.Message other) {
1073        if (other instanceof org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto) {
1074          return mergeFrom((org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto)other);
1075        } else {
1076          super.mergeFrom(other);
1077          return this;
1078        }
1079      }
1080
1081      public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto other) {
1082        if (other == org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto.getDefaultInstance()) return this;
1083        if (other.hasMessage()) {
1084          bitField0_ |= 0x00000001;
1085          message_ = other.message_;
1086          onChanged();
1087        }
1088        this.mergeUnknownFields(other.getUnknownFields());
1089        return this;
1090      }
1091
1092      public final boolean isInitialized() {
1093        if (!hasMessage()) {
1094          
1095          return false;
1096        }
1097        return true;
1098      }
1099
1100      public Builder mergeFrom(
1101          com.google.protobuf.CodedInputStream input,
1102          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1103          throws java.io.IOException {
1104        org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto parsedMessage = null;
1105        try {
1106          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
1107        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
1108          parsedMessage = (org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto) e.getUnfinishedMessage();
1109          throw e;
1110        } finally {
1111          if (parsedMessage != null) {
1112            mergeFrom(parsedMessage);
1113          }
1114        }
1115        return this;
1116      }
1117      private int bitField0_;
1118
1119      // required string message = 1;
1120      private java.lang.Object message_ = "";
1121      /**
1122       * <code>required string message = 1;</code>
1123       */
1124      public boolean hasMessage() {
1125        return ((bitField0_ & 0x00000001) == 0x00000001);
1126      }
1127      /**
1128       * <code>required string message = 1;</code>
1129       */
1130      public java.lang.String getMessage() {
1131        java.lang.Object ref = message_;
1132        if (!(ref instanceof java.lang.String)) {
1133          java.lang.String s = ((com.google.protobuf.ByteString) ref)
1134              .toStringUtf8();
1135          message_ = s;
1136          return s;
1137        } else {
1138          return (java.lang.String) ref;
1139        }
1140      }
1141      /**
1142       * <code>required string message = 1;</code>
1143       */
1144      public com.google.protobuf.ByteString
1145          getMessageBytes() {
1146        java.lang.Object ref = message_;
1147        if (ref instanceof String) {
1148          com.google.protobuf.ByteString b = 
1149              com.google.protobuf.ByteString.copyFromUtf8(
1150                  (java.lang.String) ref);
1151          message_ = b;
1152          return b;
1153        } else {
1154          return (com.google.protobuf.ByteString) ref;
1155        }
1156      }
1157      /**
1158       * <code>required string message = 1;</code>
1159       */
1160      public Builder setMessage(
1161          java.lang.String value) {
1162        if (value == null) {
1163    throw new NullPointerException();
1164  }
1165  bitField0_ |= 0x00000001;
1166        message_ = value;
1167        onChanged();
1168        return this;
1169      }
1170      /**
1171       * <code>required string message = 1;</code>
1172       */
1173      public Builder clearMessage() {
1174        bitField0_ = (bitField0_ & ~0x00000001);
1175        message_ = getDefaultInstance().getMessage();
1176        onChanged();
1177        return this;
1178      }
1179      /**
1180       * <code>required string message = 1;</code>
1181       */
1182      public Builder setMessageBytes(
1183          com.google.protobuf.ByteString value) {
1184        if (value == null) {
1185    throw new NullPointerException();
1186  }
1187  bitField0_ |= 0x00000001;
1188        message_ = value;
1189        onChanged();
1190        return this;
1191      }
1192
1193      // @@protoc_insertion_point(builder_scope:hadoop.common.EchoRequestProto)
1194    }
1195
    // Eagerly create the shared default instance when the class is loaded.
    static {
      defaultInstance = new EchoRequestProto(true);
      defaultInstance.initFields();
    }
1200
1201    // @@protoc_insertion_point(class_scope:hadoop.common.EchoRequestProto)
1202  }
1203
  // Read-only view of an EchoResponseProto message or builder.
  public interface EchoResponseProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required string message = 1;
    /**
     * <code>required string message = 1;</code>
     */
    boolean hasMessage();
    /**
     * <code>required string message = 1;</code>
     */
    java.lang.String getMessage();
    /**
     * <code>required string message = 1;</code>
     */
    com.google.protobuf.ByteString
        getMessageBytes();
  }
1222  /**
1223   * Protobuf type {@code hadoop.common.EchoResponseProto}
1224   */
1225  public static final class EchoResponseProto extends
1226      com.google.protobuf.GeneratedMessage
1227      implements EchoResponseProtoOrBuilder {
1228    // Use EchoResponseProto.newBuilder() to construct.
    // Use EchoResponseProto.newBuilder() to construct.
    private EchoResponseProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // noInit constructor: used only for the shared default instance below.
    private EchoResponseProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    // Shared immutable default (empty) instance, created in the static initializer.
    private static final EchoResponseProto defaultInstance;
    public static EchoResponseProto getDefaultInstance() {
      return defaultInstance;
    }

    public EchoResponseProto getDefaultInstanceForType() {
      return defaultInstance;
    }

    // Fields that arrived on the wire but are not declared in the schema.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
1250    private EchoResponseProto(
1251        com.google.protobuf.CodedInputStream input,
1252        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1253        throws com.google.protobuf.InvalidProtocolBufferException {
1254      initFields();
1255      int mutable_bitField0_ = 0;
1256      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
1257          com.google.protobuf.UnknownFieldSet.newBuilder();
1258      try {
1259        boolean done = false;
1260        while (!done) {
1261          int tag = input.readTag();
1262          switch (tag) {
1263            case 0:
1264              done = true;
1265              break;
1266            default: {
1267              if (!parseUnknownField(input, unknownFields,
1268                                     extensionRegistry, tag)) {
1269                done = true;
1270              }
1271              break;
1272            }
1273            case 10: {
1274              bitField0_ |= 0x00000001;
1275              message_ = input.readBytes();
1276              break;
1277            }
1278          }
1279        }
1280      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
1281        throw e.setUnfinishedMessage(this);
1282      } catch (java.io.IOException e) {
1283        throw new com.google.protobuf.InvalidProtocolBufferException(
1284            e.getMessage()).setUnfinishedMessage(this);
1285      } finally {
1286        this.unknownFields = unknownFields.build();
1287        makeExtensionsImmutable();
1288      }
1289    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EchoResponseProto_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EchoResponseProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto.class, org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto.Builder.class);
    }

    // Parser used by all static parseFrom(...) overloads below; delegates to
    // the stream-parsing constructor.
    public static com.google.protobuf.Parser<EchoResponseProto> PARSER =
        new com.google.protobuf.AbstractParser<EchoResponseProto>() {
      public EchoResponseProto parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new EchoResponseProto(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<EchoResponseProto> getParserForType() {
      return PARSER;
    }

    // Presence bits: bit 0 tracks whether "message" was set.
    private int bitField0_;
    // required string message = 1;
    public static final int MESSAGE_FIELD_NUMBER = 1;
    // Holds either a String or a ByteString; converted lazily by the accessors.
    private java.lang.Object message_;
    /**
     * <code>required string message = 1;</code>
     */
    public boolean hasMessage() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
1327    /**
1328     * <code>required string message = 1;</code>
1329     */
1330    public java.lang.String getMessage() {
1331      java.lang.Object ref = message_;
1332      if (ref instanceof java.lang.String) {
1333        return (java.lang.String) ref;
1334      } else {
1335        com.google.protobuf.ByteString bs = 
1336            (com.google.protobuf.ByteString) ref;
1337        java.lang.String s = bs.toStringUtf8();
1338        if (bs.isValidUtf8()) {
1339          message_ = s;
1340        }
1341        return s;
1342      }
1343    }
1344    /**
1345     * <code>required string message = 1;</code>
1346     */
1347    public com.google.protobuf.ByteString
1348        getMessageBytes() {
1349      java.lang.Object ref = message_;
1350      if (ref instanceof java.lang.String) {
1351        com.google.protobuf.ByteString b = 
1352            com.google.protobuf.ByteString.copyFromUtf8(
1353                (java.lang.String) ref);
1354        message_ = b;
1355        return b;
1356      } else {
1357        return (com.google.protobuf.ByteString) ref;
1358      }
1359    }
1360
    private void initFields() {
      message_ = "";
    }
    // Memoized result of isInitialized(): -1 unknown, 0 false, 1 true.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      // The required "message" field must be present.
      if (!hasMessage()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }

    /** Serializes the set fields, then any unknown fields, to {@code output}. */
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeBytes(1, getMessageBytes());
      }
      getUnknownFields().writeTo(output);
    }

    // Memoized wire size: -1 means "not computed yet".
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(1, getMessageBytes());
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
1407
1408    @java.lang.Override
1409    public boolean equals(final java.lang.Object obj) {
1410      if (obj == this) {
1411       return true;
1412      }
1413      if (!(obj instanceof org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto)) {
1414        return super.equals(obj);
1415      }
1416      org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto other = (org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto) obj;
1417
1418      boolean result = true;
1419      result = result && (hasMessage() == other.hasMessage());
1420      if (hasMessage()) {
1421        result = result && getMessage()
1422            .equals(other.getMessage());
1423      }
1424      result = result &&
1425          getUnknownFields().equals(other.getUnknownFields());
1426      return result;
1427    }
1428
1429    private int memoizedHashCode = 0;
1430    @java.lang.Override
1431    public int hashCode() {
1432      if (memoizedHashCode != 0) {
1433        return memoizedHashCode;
1434      }
1435      int hash = 41;
1436      hash = (19 * hash) + getDescriptorForType().hashCode();
1437      if (hasMessage()) {
1438        hash = (37 * hash) + MESSAGE_FIELD_NUMBER;
1439        hash = (53 * hash) + getMessage().hashCode();
1440      }
1441      hash = (29 * hash) + getUnknownFields().hashCode();
1442      memoizedHashCode = hash;
1443      return hash;
1444    }
1445
    // -----------------------------------------------------------------------
    // Static parse helpers.  Every overload delegates to PARSER.
    // -----------------------------------------------------------------------
    public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    // Builder factory methods.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
1512    /**
1513     * Protobuf type {@code hadoop.common.EchoResponseProto}
1514     */
1515    public static final class Builder extends
1516        com.google.protobuf.GeneratedMessage.Builder<Builder>
1517       implements org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProtoOrBuilder {
1518      public static final com.google.protobuf.Descriptors.Descriptor
1519          getDescriptor() {
1520        return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EchoResponseProto_descriptor;
1521      }
1522
1523      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
1524          internalGetFieldAccessorTable() {
1525        return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EchoResponseProto_fieldAccessorTable
1526            .ensureFieldAccessorsInitialized(
1527                org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto.class, org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto.Builder.class);
1528      }
1529
1530      // Construct using org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto.newBuilder()
1531      private Builder() {
1532        maybeForceBuilderInitialization();
1533      }
1534
1535      private Builder(
1536          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
1537        super(parent);
1538        maybeForceBuilderInitialization();
1539      }
1540      private void maybeForceBuilderInitialization() {
1541        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
1542        }
1543      }
1544      private static Builder create() {
1545        return new Builder();
1546      }
1547
1548      public Builder clear() {
1549        super.clear();
1550        message_ = "";
1551        bitField0_ = (bitField0_ & ~0x00000001);
1552        return this;
1553      }
1554
1555      public Builder clone() {
1556        return create().mergeFrom(buildPartial());
1557      }
1558
1559      public com.google.protobuf.Descriptors.Descriptor
1560          getDescriptorForType() {
1561        return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EchoResponseProto_descriptor;
1562      }
1563
1564      public org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto getDefaultInstanceForType() {
1565        return org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto.getDefaultInstance();
1566      }
1567
      // Builds the message, enforcing that the required 'message' field is set.
      public org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto build() {
        org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      // Builds without checking required fields.
      public org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto buildPartial() {
        org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto result = new org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        // Transfer the presence bit for 'message' (bit 0) into the message.
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.message_ = message_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
1588
      // Dispatches to the typed merge when possible; otherwise falls back to
      // the reflective merge in the superclass.
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto) {
          return mergeFrom((org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto other) {
        if (other == org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto.getDefaultInstance()) return this;
        if (other.hasMessage()) {
          bitField0_ |= 0x00000001;
          // Shares the other message's backing Object (String or ByteString).
          message_ = other.message_;
          onChanged();
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
1608
1609      public final boolean isInitialized() {
1610        if (!hasMessage()) {
1611          
1612          return false;
1613        }
1614        return true;
1615      }
1616
      // Parses from a stream and merges the result into this builder.
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          // Capture whatever was parsed before the failure so the finally
          // block can still merge the partial message, then rethrow.
          parsedMessage = (org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      // Presence bits for this builder's fields (bit 0 = 'message').
      private int bitField0_;

      // required string message = 1;
      // Holds either a java.lang.String or a com.google.protobuf.ByteString;
      // the accessors lazily convert and cache between the two forms.
      private java.lang.Object message_ = "";
      /**
       * <code>required string message = 1;</code>
       */
      public boolean hasMessage() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
1644      /**
1645       * <code>required string message = 1;</code>
1646       */
1647      public java.lang.String getMessage() {
1648        java.lang.Object ref = message_;
1649        if (!(ref instanceof java.lang.String)) {
1650          java.lang.String s = ((com.google.protobuf.ByteString) ref)
1651              .toStringUtf8();
1652          message_ = s;
1653          return s;
1654        } else {
1655          return (java.lang.String) ref;
1656        }
1657      }
1658      /**
1659       * <code>required string message = 1;</code>
1660       */
1661      public com.google.protobuf.ByteString
1662          getMessageBytes() {
1663        java.lang.Object ref = message_;
1664        if (ref instanceof String) {
1665          com.google.protobuf.ByteString b = 
1666              com.google.protobuf.ByteString.copyFromUtf8(
1667                  (java.lang.String) ref);
1668          message_ = b;
1669          return b;
1670        } else {
1671          return (com.google.protobuf.ByteString) ref;
1672        }
1673      }
1674      /**
1675       * <code>required string message = 1;</code>
1676       */
1677      public Builder setMessage(
1678          java.lang.String value) {
1679        if (value == null) {
1680    throw new NullPointerException();
1681  }
1682  bitField0_ |= 0x00000001;
1683        message_ = value;
1684        onChanged();
1685        return this;
1686      }
1687      /**
1688       * <code>required string message = 1;</code>
1689       */
1690      public Builder clearMessage() {
1691        bitField0_ = (bitField0_ & ~0x00000001);
1692        message_ = getDefaultInstance().getMessage();
1693        onChanged();
1694        return this;
1695      }
1696      /**
1697       * <code>required string message = 1;</code>
1698       */
1699      public Builder setMessageBytes(
1700          com.google.protobuf.ByteString value) {
1701        if (value == null) {
1702    throw new NullPointerException();
1703  }
1704  bitField0_ |= 0x00000001;
1705        message_ = value;
1706        onChanged();
1707        return this;
1708      }
1709
1710      // @@protoc_insertion_point(builder_scope:hadoop.common.EchoResponseProto)
1711    }
1712
    static {
      // Eagerly create the singleton returned by getDefaultInstance().
      defaultInstance = new EchoResponseProto(true);
      defaultInstance.initFields();
    }
1717
1718    // @@protoc_insertion_point(class_scope:hadoop.common.EchoResponseProto)
1719  }
1720
  // Read-only accessor view implemented by both SleepRequestProto and its
  // Builder.
  public interface SleepRequestProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required int32 milliSeconds = 1;
    /**
     * <code>required int32 milliSeconds = 1;</code>
     * @return whether the milliSeconds field has been set.
     */
    boolean hasMilliSeconds();
    /**
     * <code>required int32 milliSeconds = 1;</code>
     * @return the milliSeconds field's value.
     */
    int getMilliSeconds();
  }
1734  /**
1735   * Protobuf type {@code hadoop.common.SleepRequestProto}
1736   */
  // Generated message with a single required int32 field, 'milliSeconds'.
  public static final class SleepRequestProto extends
      com.google.protobuf.GeneratedMessage
      implements SleepRequestProtoOrBuilder {
    // Use SleepRequestProto.newBuilder() to construct.
    private SleepRequestProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // noInit path: used only to create the shared default instance below.
    private SleepRequestProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    // Singleton default instance, assigned in the static initializer.
    private static final SleepRequestProto defaultInstance;
    public static SleepRequestProto getDefaultInstance() {
      return defaultInstance;
    }

    public SleepRequestProto getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor, invoked via PARSER below.
    private SleepRequestProto(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          // NOTE: the generator emits 'default:' before 'case 8:'; each arm
          // breaks, so the ordering does not change behavior.
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 8: {
              // tag 8 = field 1, wiretype varint: the milliSeconds value.
              bitField0_ |= 0x00000001;
              milliSeconds_ = input.readInt32();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Always freeze whatever unknown fields were accumulated.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_SleepRequestProto_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_SleepRequestProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto.class, org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto.Builder.class);
    }

    // NOTE(review): PARSER is public, static and non-final as emitted by this
    // generator version; later protoc versions make it final. Do not reassign.
    public static com.google.protobuf.Parser<SleepRequestProto> PARSER =
        new com.google.protobuf.AbstractParser<SleepRequestProto>() {
      public SleepRequestProto parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new SleepRequestProto(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<SleepRequestProto> getParserForType() {
      return PARSER;
    }

    // Presence bits for this message's fields (bit 0 = 'milliSeconds').
    private int bitField0_;
    // required int32 milliSeconds = 1;
    public static final int MILLISECONDS_FIELD_NUMBER = 1;
    private int milliSeconds_;
    /**
     * <code>required int32 milliSeconds = 1;</code>
     */
    public boolean hasMilliSeconds() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required int32 milliSeconds = 1;</code>
     */
    public int getMilliSeconds() {
      return milliSeconds_;
    }

    private void initFields() {
      milliSeconds_ = 0;
    }
    // Memoized result: -1 = not computed, 0 = false, 1 = true.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      // 'milliSeconds' is required; the message is invalid without it.
      if (!hasMilliSeconds()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      // Called for its side effect of populating memoizedSerializedSize.
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeInt32(1, milliSeconds_);
      }
      getUnknownFields().writeTo(output);
    }

    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeInt32Size(1, milliSeconds_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto other = (org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto) obj;

      // Equal iff presence, value, and unknown fields all match.
      boolean result = true;
      result = result && (hasMilliSeconds() == other.hasMilliSeconds());
      if (hasMilliSeconds()) {
        result = result && (getMilliSeconds()
            == other.getMilliSeconds());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      // Mixes the descriptor, each set field (number then value), and the
      // unknown fields, consistent with equals() above.
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasMilliSeconds()) {
        hash = (37 * hash) + MILLISECONDS_FIELD_NUMBER;
        hash = (53 * hash) + getMilliSeconds();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    // Static parse helpers; all delegate to PARSER.
    public static org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.common.SleepRequestProto}
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_SleepRequestProto_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_SleepRequestProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto.class, org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto.Builder.class);
      }

      // Construct using org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // No-op body: no nested-message fields, so no builders to force.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        milliSeconds_ = 0;
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_SleepRequestProto_descriptor;
      }

      public org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto getDefaultInstanceForType() {
        return org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto.getDefaultInstance();
      }

      // Builds the message, enforcing the required 'milliSeconds' field.
      public org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto build() {
        org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      // Builds without checking required fields; copies the presence bit.
      public org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto buildPartial() {
        org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto result = new org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.milliSeconds_ = milliSeconds_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto) {
          return mergeFrom((org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto other) {
        if (other == org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto.getDefaultInstance()) return this;
        if (other.hasMilliSeconds()) {
          setMilliSeconds(other.getMilliSeconds());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        if (!hasMilliSeconds()) {
          // Required field missing; no error detail is collected here.
          return false;
        }
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          // Keep the partial message so the finally block can merge it.
          parsedMessage = (org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;

      // required int32 milliSeconds = 1;
      private int milliSeconds_ ;
      /**
       * <code>required int32 milliSeconds = 1;</code>
       */
      public boolean hasMilliSeconds() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>required int32 milliSeconds = 1;</code>
       */
      public int getMilliSeconds() {
        return milliSeconds_;
      }
      /**
       * <code>required int32 milliSeconds = 1;</code>
       */
      public Builder setMilliSeconds(int value) {
        bitField0_ |= 0x00000001;
        milliSeconds_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>required int32 milliSeconds = 1;</code>
       */
      public Builder clearMilliSeconds() {
        bitField0_ = (bitField0_ & ~0x00000001);
        milliSeconds_ = 0;
        onChanged();
        return this;
      }

      // @@protoc_insertion_point(builder_scope:hadoop.common.SleepRequestProto)
    }

    static {
      // Eagerly create the singleton returned by getDefaultInstance().
      defaultInstance = new SleepRequestProto(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:hadoop.common.SleepRequestProto)
  }
2162
  // Marker accessor interface: SleepResponseProto declares no fields.
  public interface SleepResponseProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
  }
2166  /**
2167   * Protobuf type {@code hadoop.common.SleepResponseProto}
2168   */
2169  public static final class SleepResponseProto extends
2170      com.google.protobuf.GeneratedMessage
2171      implements SleepResponseProtoOrBuilder {
2172    // Use SleepResponseProto.newBuilder() to construct.
    private SleepResponseProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // noInit path: used only to create the shared default instance.
    private SleepResponseProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    // Singleton default instance; presumably assigned in the class's static
    // initializer like the sibling messages (not visible in this chunk).
    private static final SleepResponseProto defaultInstance;
    public static SleepResponseProto getDefaultInstance() {
      return defaultInstance;
    }

    public SleepResponseProto getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor: the message has no fields, so every
    // non-zero tag is treated as an unknown field.
    private SleepResponseProto(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Always freeze whatever unknown fields were accumulated.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_SleepResponseProto_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_SleepResponseProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto.class, org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto.Builder.class);
    }

    // NOTE(review): PARSER is public, static and non-final as emitted by this
    // generator version; later protoc versions make it final. Do not reassign.
    public static com.google.protobuf.Parser<SleepResponseProto> PARSER =
        new com.google.protobuf.AbstractParser<SleepResponseProto>() {
      public SleepResponseProto parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new SleepResponseProto(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<SleepResponseProto> getParserForType() {
      return PARSER;
    }
2254
    // No fields to initialize for this empty message.
    private void initFields() {
    }
    // Memoized result: -1 = not computed, 0 = false, 1 = true.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      // No required fields, so the message is always initialized.
      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      // Called for its side effect of populating memoizedSerializedSize.
      getSerializedSize();
      getUnknownFields().writeTo(output);
    }

    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      // Only unknown fields contribute to the size of this empty message.
      size = 0;
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
2282
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
2289
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto other = (org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto) obj;

      // No declared fields: equality depends only on the unknown fields.
      boolean result = true;
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      // Mixes the descriptor and unknown fields, consistent with equals().
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
2318
2319    public static org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto parseFrom(
2320        com.google.protobuf.ByteString data)
2321        throws com.google.protobuf.InvalidProtocolBufferException {
2322      return PARSER.parseFrom(data);
2323    }
2324    public static org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto parseFrom(
2325        com.google.protobuf.ByteString data,
2326        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
2327        throws com.google.protobuf.InvalidProtocolBufferException {
2328      return PARSER.parseFrom(data, extensionRegistry);
2329    }
2330    public static org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto parseFrom(byte[] data)
2331        throws com.google.protobuf.InvalidProtocolBufferException {
2332      return PARSER.parseFrom(data);
2333    }
2334    public static org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto parseFrom(
2335        byte[] data,
2336        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
2337        throws com.google.protobuf.InvalidProtocolBufferException {
2338      return PARSER.parseFrom(data, extensionRegistry);
2339    }
2340    public static org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto parseFrom(java.io.InputStream input)
2341        throws java.io.IOException {
2342      return PARSER.parseFrom(input);
2343    }
2344    public static org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto parseFrom(
2345        java.io.InputStream input,
2346        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
2347        throws java.io.IOException {
2348      return PARSER.parseFrom(input, extensionRegistry);
2349    }
2350    public static org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto parseDelimitedFrom(java.io.InputStream input)
2351        throws java.io.IOException {
2352      return PARSER.parseDelimitedFrom(input);
2353    }
2354    public static org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto parseDelimitedFrom(
2355        java.io.InputStream input,
2356        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
2357        throws java.io.IOException {
2358      return PARSER.parseDelimitedFrom(input, extensionRegistry);
2359    }
    /**
     * Parses a {@code SleepResponseProto} from an already-wrapped
     * {@link com.google.protobuf.CodedInputStream}.
     *
     * @param input coded stream positioned at the start of the message
     * @throws java.io.IOException on read failure or malformed input
     */
    public static org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    /**
     * Parses a {@code SleepResponseProto} from a
     * {@link com.google.protobuf.CodedInputStream}, resolving extensions against
     * the supplied registry.
     *
     * @param input coded stream positioned at the start of the message
     * @param extensionRegistry registry used to look up extension fields while parsing
     * @throws java.io.IOException on read failure or malformed input
     */
    public static org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
2371
2372    public static Builder newBuilder() { return Builder.create(); }
2373    public Builder newBuilderForType() { return newBuilder(); }
    /**
     * Returns a new {@link Builder} pre-populated by merging from {@code prototype}.
     *
     * @param prototype message whose fields seed the new builder
     */
    public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
2377    public Builder toBuilder() { return newBuilder(this); }
2378
    /**
     * Internal factory used by the protobuf runtime to create a builder attached
     * to a parent, so nested-builder changes can propagate upward.
     */
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
2385    /**
2386     * Protobuf type {@code hadoop.common.SleepResponseProto}
2387     */
2388    public static final class Builder extends
2389        com.google.protobuf.GeneratedMessage.Builder<Builder>
2390       implements org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProtoOrBuilder {
2391      public static final com.google.protobuf.Descriptors.Descriptor
2392          getDescriptor() {
2393        return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_SleepResponseProto_descriptor;
2394      }
2395
2396      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
2397          internalGetFieldAccessorTable() {
2398        return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_SleepResponseProto_fieldAccessorTable
2399            .ensureFieldAccessorsInitialized(
2400                org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto.class, org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto.Builder.class);
2401      }
2402
2403      // Construct using org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto.newBuilder()
2404      private Builder() {
2405        maybeForceBuilderInitialization();
2406      }
2407
2408      private Builder(
2409          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
2410        super(parent);
2411        maybeForceBuilderInitialization();
2412      }
2413      private void maybeForceBuilderInitialization() {
2414        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
2415        }
2416      }
2417      private static Builder create() {
2418        return new Builder();
2419      }
2420
2421      public Builder clear() {
2422        super.clear();
2423        return this;
2424      }
2425
2426      public Builder clone() {
2427        return create().mergeFrom(buildPartial());
2428      }
2429
2430      public com.google.protobuf.Descriptors.Descriptor
2431          getDescriptorForType() {
2432        return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_SleepResponseProto_descriptor;
2433      }
2434
2435      public org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto getDefaultInstanceForType() {
2436        return org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto.getDefaultInstance();
2437      }
2438
2439      public org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto build() {
2440        org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto result = buildPartial();
2441        if (!result.isInitialized()) {
2442          throw newUninitializedMessageException(result);
2443        }
2444        return result;
2445      }
2446
2447      public org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto buildPartial() {
2448        org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto result = new org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto(this);
2449        onBuilt();
2450        return result;
2451      }
2452
2453      public Builder mergeFrom(com.google.protobuf.Message other) {
2454        if (other instanceof org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto) {
2455          return mergeFrom((org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto)other);
2456        } else {
2457          super.mergeFrom(other);
2458          return this;
2459        }
2460      }
2461
2462      public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto other) {
2463        if (other == org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto.getDefaultInstance()) return this;
2464        this.mergeUnknownFields(other.getUnknownFields());
2465        return this;
2466      }
2467
2468      public final boolean isInitialized() {
2469        return true;
2470      }
2471
2472      public Builder mergeFrom(
2473          com.google.protobuf.CodedInputStream input,
2474          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
2475          throws java.io.IOException {
2476        org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto parsedMessage = null;
2477        try {
2478          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
2479        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
2480          parsedMessage = (org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto) e.getUnfinishedMessage();
2481          throw e;
2482        } finally {
2483          if (parsedMessage != null) {
2484            mergeFrom(parsedMessage);
2485          }
2486        }
2487        return this;
2488      }
2489
2490      // @@protoc_insertion_point(builder_scope:hadoop.common.SleepResponseProto)
2491    }
2492
    // Eagerly create the shared singleton returned by getDefaultInstance().
    // The (true) constructor skips normal initialization; initFields() then
    // sets field defaults (a no-op for this fieldless message).
    static {
      defaultInstance = new SleepResponseProto(true);
      defaultInstance.initFields();
    }
2497
2498    // @@protoc_insertion_point(class_scope:hadoop.common.SleepResponseProto)
2499  }
2500
  // Per-message reflection metadata, populated by the static descriptor
  // initializer at the bottom of this file: each message gets its Descriptor
  // (schema) and a FieldAccessorTable (maps descriptor fields to generated
  // getters/setters).
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_common_EmptyRequestProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_common_EmptyRequestProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_common_EmptyResponseProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_common_EmptyResponseProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_common_EchoRequestProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_common_EchoRequestProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_common_EchoResponseProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_common_EchoResponseProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_common_SleepRequestProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_common_SleepRequestProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_common_SleepResponseProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_common_SleepResponseProto_fieldAccessorTable;
2531
  /** Returns the file descriptor for {@code test.proto} (built in the static block below). */
  public static com.google.protobuf.Descriptors.FileDescriptor
      getDescriptor() {
    return descriptor;
  }
  // File-level descriptor for test.proto; assigned once by the static
  // initializer immediately below.
  private static com.google.protobuf.Descriptors.FileDescriptor
      descriptor;
  static {
    // descriptorData is the serialized FileDescriptorProto for test.proto,
    // embedded as escaped string literals. It encodes the package
    // (hadoop.common), the six message types in declaration order, and the
    // java_package / java_outer_classname file options.
    // DO NOT edit these strings: they must stay byte-identical to protoc output.
    java.lang.String[] descriptorData = {
      "\n\ntest.proto\022\rhadoop.common\"\023\n\021EmptyRequ" +
      "estProto\"\024\n\022EmptyResponseProto\"#\n\020EchoRe" +
      "questProto\022\017\n\007message\030\001 \002(\t\"$\n\021EchoRespo" +
      "nseProto\022\017\n\007message\030\001 \002(\t\")\n\021SleepReques" +
      "tProto\022\024\n\014milliSeconds\030\001 \002(\005\"\024\n\022SleepRes" +
      "ponseProtoB/\n\036org.apache.hadoop.ipc.prot" +
      "obufB\nTestProtos\240\001\001"
    };
    // Callback invoked after the file descriptor is built: stores it in
    // `descriptor` and wires up each message's Descriptor and
    // FieldAccessorTable. The get(N) indices must match the message
    // declaration order in descriptorData above.
    com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
      new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
        public com.google.protobuf.ExtensionRegistry assignDescriptors(
            com.google.protobuf.Descriptors.FileDescriptor root) {
          descriptor = root;
          internal_static_hadoop_common_EmptyRequestProto_descriptor =
            getDescriptor().getMessageTypes().get(0);
          internal_static_hadoop_common_EmptyRequestProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_common_EmptyRequestProto_descriptor,
              new java.lang.String[] { });
          internal_static_hadoop_common_EmptyResponseProto_descriptor =
            getDescriptor().getMessageTypes().get(1);
          internal_static_hadoop_common_EmptyResponseProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_common_EmptyResponseProto_descriptor,
              new java.lang.String[] { });
          internal_static_hadoop_common_EchoRequestProto_descriptor =
            getDescriptor().getMessageTypes().get(2);
          internal_static_hadoop_common_EchoRequestProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_common_EchoRequestProto_descriptor,
              new java.lang.String[] { "Message", });
          internal_static_hadoop_common_EchoResponseProto_descriptor =
            getDescriptor().getMessageTypes().get(3);
          internal_static_hadoop_common_EchoResponseProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_common_EchoResponseProto_descriptor,
              new java.lang.String[] { "Message", });
          internal_static_hadoop_common_SleepRequestProto_descriptor =
            getDescriptor().getMessageTypes().get(4);
          internal_static_hadoop_common_SleepRequestProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_common_SleepRequestProto_descriptor,
              new java.lang.String[] { "MilliSeconds", });
          internal_static_hadoop_common_SleepResponseProto_descriptor =
            getDescriptor().getMessageTypes().get(5);
          internal_static_hadoop_common_SleepResponseProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_common_SleepResponseProto_descriptor,
              new java.lang.String[] { });
          // No extensions to register for this file.
          return null;
        }
      };
    // Build the FileDescriptor from the serialized data; test.proto imports
    // no other .proto files, so the dependency array is empty.
    com.google.protobuf.Descriptors.FileDescriptor
      .internalBuildGeneratedFileFrom(descriptorData,
        new com.google.protobuf.Descriptors.FileDescriptor[] {
        }, assigner);
  }
2597
2598  // @@protoc_insertion_point(outer_class_scope)
2599}