001 // Generated by the protocol buffer compiler. DO NOT EDIT!
002 // source: IpcConnectionContext.proto
003
004 package org.apache.hadoop.ipc.protobuf;
005
006 public final class IpcConnectionContextProtos {
007 private IpcConnectionContextProtos() {}
008 public static void registerAllExtensions(
009 com.google.protobuf.ExtensionRegistry registry) {
010 }
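  // Editorial note (added by the editor, not protoc): this outer class is the
  // java_outer_classname container for IpcConnectionContext.proto. It holds the
  // two generated messages used during RPC connection setup
  // (UserInformationProto and IpcConnectionContextProto) together with their
  // descriptors and field accessor tables.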
011 public interface UserInformationProtoOrBuilder
012 extends com.google.protobuf.MessageOrBuilder {
013
014 // optional string effectiveUser = 1;
015 /**
016 * <code>optional string effectiveUser = 1;</code>
017 */
018 boolean hasEffectiveUser();
019 /**
020 * <code>optional string effectiveUser = 1;</code>
021 */
022 java.lang.String getEffectiveUser();
023 /**
024 * <code>optional string effectiveUser = 1;</code>
025 */
026 com.google.protobuf.ByteString
027 getEffectiveUserBytes();
028
029 // optional string realUser = 2;
030 /**
031 * <code>optional string realUser = 2;</code>
032 */
033 boolean hasRealUser();
034 /**
035 * <code>optional string realUser = 2;</code>
036 */
037 java.lang.String getRealUser();
038 /**
039 * <code>optional string realUser = 2;</code>
040 */
041 com.google.protobuf.ByteString
042 getRealUserBytes();
043 }
044 /**
045 * Protobuf type {@code hadoop.common.UserInformationProto}
046 *
047 * <pre>
048 **
049 * Spec for UserInformationProto is specified in ProtoUtil#makeIpcConnectionContext
050 * </pre>
051 */
052 public static final class UserInformationProto extends
053 com.google.protobuf.GeneratedMessage
054 implements UserInformationProtoOrBuilder {
055 // Use UserInformationProto.newBuilder() to construct.
056 private UserInformationProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
057 super(builder);
058 this.unknownFields = builder.getUnknownFields();
059 }
060 private UserInformationProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
061
062 private static final UserInformationProto defaultInstance;
063 public static UserInformationProto getDefaultInstance() {
064 return defaultInstance;
065 }
066
067 public UserInformationProto getDefaultInstanceForType() {
068 return defaultInstance;
069 }
070
071 private final com.google.protobuf.UnknownFieldSet unknownFields;
072 @java.lang.Override
073 public final com.google.protobuf.UnknownFieldSet
074 getUnknownFields() {
075 return this.unknownFields;
076 }
077 private UserInformationProto(
078 com.google.protobuf.CodedInputStream input,
079 com.google.protobuf.ExtensionRegistryLite extensionRegistry)
080 throws com.google.protobuf.InvalidProtocolBufferException {
081 initFields();
082 int mutable_bitField0_ = 0;
083 com.google.protobuf.UnknownFieldSet.Builder unknownFields =
084 com.google.protobuf.UnknownFieldSet.newBuilder();
085 try {
086 boolean done = false;
087 while (!done) {
088 int tag = input.readTag();
089 switch (tag) {
090 case 0:
091 done = true;
092 break;
093 default: {
094 if (!parseUnknownField(input, unknownFields,
095 extensionRegistry, tag)) {
096 done = true;
097 }
098 break;
099 }
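            // Editorial note: tag = (field number << 3) | wire type.
            // 10 = field 1 (effectiveUser), wire type 2 (length-delimited);
            // 18 = field 2 (realUser), also length-delimited.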
100 case 10: {
101 bitField0_ |= 0x00000001;
102 effectiveUser_ = input.readBytes();
103 break;
104 }
105 case 18: {
106 bitField0_ |= 0x00000002;
107 realUser_ = input.readBytes();
108 break;
109 }
110 }
111 }
112 } catch (com.google.protobuf.InvalidProtocolBufferException e) {
113 throw e.setUnfinishedMessage(this);
114 } catch (java.io.IOException e) {
115 throw new com.google.protobuf.InvalidProtocolBufferException(
116 e.getMessage()).setUnfinishedMessage(this);
117 } finally {
118 this.unknownFields = unknownFields.build();
119 makeExtensionsImmutable();
120 }
121 }
122 public static final com.google.protobuf.Descriptors.Descriptor
123 getDescriptor() {
124 return org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.internal_static_hadoop_common_UserInformationProto_descriptor;
125 }
126
127 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
128 internalGetFieldAccessorTable() {
129 return org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.internal_static_hadoop_common_UserInformationProto_fieldAccessorTable
130 .ensureFieldAccessorsInitialized(
131 org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto.class, org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto.Builder.class);
132 }
133
134 public static com.google.protobuf.Parser<UserInformationProto> PARSER =
135 new com.google.protobuf.AbstractParser<UserInformationProto>() {
136 public UserInformationProto parsePartialFrom(
137 com.google.protobuf.CodedInputStream input,
138 com.google.protobuf.ExtensionRegistryLite extensionRegistry)
139 throws com.google.protobuf.InvalidProtocolBufferException {
140 return new UserInformationProto(input, extensionRegistry);
141 }
142 };
143
144 @java.lang.Override
145 public com.google.protobuf.Parser<UserInformationProto> getParserForType() {
146 return PARSER;
147 }
148
149 private int bitField0_;
150 // optional string effectiveUser = 1;
151 public static final int EFFECTIVEUSER_FIELD_NUMBER = 1;
152 private java.lang.Object effectiveUser_;
153 /**
154 * <code>optional string effectiveUser = 1;</code>
155 */
156 public boolean hasEffectiveUser() {
157 return ((bitField0_ & 0x00000001) == 0x00000001);
158 }
159 /**
160 * <code>optional string effectiveUser = 1;</code>
161 */
162 public java.lang.String getEffectiveUser() {
163 java.lang.Object ref = effectiveUser_;
164 if (ref instanceof java.lang.String) {
165 return (java.lang.String) ref;
166 } else {
167 com.google.protobuf.ByteString bs =
168 (com.google.protobuf.ByteString) ref;
169 java.lang.String s = bs.toStringUtf8();
170 if (bs.isValidUtf8()) {
171 effectiveUser_ = s;
172 }
173 return s;
174 }
175 }
176 /**
177 * <code>optional string effectiveUser = 1;</code>
178 */
179 public com.google.protobuf.ByteString
180 getEffectiveUserBytes() {
181 java.lang.Object ref = effectiveUser_;
182 if (ref instanceof java.lang.String) {
183 com.google.protobuf.ByteString b =
184 com.google.protobuf.ByteString.copyFromUtf8(
185 (java.lang.String) ref);
186 effectiveUser_ = b;
187 return b;
188 } else {
189 return (com.google.protobuf.ByteString) ref;
190 }
191 }
192
193 // optional string realUser = 2;
194 public static final int REALUSER_FIELD_NUMBER = 2;
195 private java.lang.Object realUser_;
196 /**
197 * <code>optional string realUser = 2;</code>
198 */
199 public boolean hasRealUser() {
200 return ((bitField0_ & 0x00000002) == 0x00000002);
201 }
202 /**
203 * <code>optional string realUser = 2;</code>
204 */
205 public java.lang.String getRealUser() {
206 java.lang.Object ref = realUser_;
207 if (ref instanceof java.lang.String) {
208 return (java.lang.String) ref;
209 } else {
210 com.google.protobuf.ByteString bs =
211 (com.google.protobuf.ByteString) ref;
212 java.lang.String s = bs.toStringUtf8();
213 if (bs.isValidUtf8()) {
214 realUser_ = s;
215 }
216 return s;
217 }
218 }
219 /**
220 * <code>optional string realUser = 2;</code>
221 */
222 public com.google.protobuf.ByteString
223 getRealUserBytes() {
224 java.lang.Object ref = realUser_;
225 if (ref instanceof java.lang.String) {
226 com.google.protobuf.ByteString b =
227 com.google.protobuf.ByteString.copyFromUtf8(
228 (java.lang.String) ref);
229 realUser_ = b;
230 return b;
231 } else {
232 return (com.google.protobuf.ByteString) ref;
233 }
234 }
235
236 private void initFields() {
237 effectiveUser_ = "";
238 realUser_ = "";
239 }
240 private byte memoizedIsInitialized = -1;
241 public final boolean isInitialized() {
242 byte isInitialized = memoizedIsInitialized;
243 if (isInitialized != -1) return isInitialized == 1;
244
245 memoizedIsInitialized = 1;
246 return true;
247 }
248
249 public void writeTo(com.google.protobuf.CodedOutputStream output)
250 throws java.io.IOException {
251 getSerializedSize();
252 if (((bitField0_ & 0x00000001) == 0x00000001)) {
253 output.writeBytes(1, getEffectiveUserBytes());
254 }
255 if (((bitField0_ & 0x00000002) == 0x00000002)) {
256 output.writeBytes(2, getRealUserBytes());
257 }
258 getUnknownFields().writeTo(output);
259 }
260
261 private int memoizedSerializedSize = -1;
262 public int getSerializedSize() {
263 int size = memoizedSerializedSize;
264 if (size != -1) return size;
265
266 size = 0;
267 if (((bitField0_ & 0x00000001) == 0x00000001)) {
268 size += com.google.protobuf.CodedOutputStream
269 .computeBytesSize(1, getEffectiveUserBytes());
270 }
271 if (((bitField0_ & 0x00000002) == 0x00000002)) {
272 size += com.google.protobuf.CodedOutputStream
273 .computeBytesSize(2, getRealUserBytes());
274 }
275 size += getUnknownFields().getSerializedSize();
276 memoizedSerializedSize = size;
277 return size;
278 }
279
280 private static final long serialVersionUID = 0L;
281 @java.lang.Override
282 protected java.lang.Object writeReplace()
283 throws java.io.ObjectStreamException {
284 return super.writeReplace();
285 }
286
287 @java.lang.Override
288 public boolean equals(final java.lang.Object obj) {
289 if (obj == this) {
290 return true;
291 }
292 if (!(obj instanceof org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto)) {
293 return super.equals(obj);
294 }
295 org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto other = (org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto) obj;
296
297 boolean result = true;
298 result = result && (hasEffectiveUser() == other.hasEffectiveUser());
299 if (hasEffectiveUser()) {
300 result = result && getEffectiveUser()
301 .equals(other.getEffectiveUser());
302 }
303 result = result && (hasRealUser() == other.hasRealUser());
304 if (hasRealUser()) {
305 result = result && getRealUser()
306 .equals(other.getRealUser());
307 }
308 result = result &&
309 getUnknownFields().equals(other.getUnknownFields());
310 return result;
311 }
312
313 private int memoizedHashCode = 0;
314 @java.lang.Override
315 public int hashCode() {
316 if (memoizedHashCode != 0) {
317 return memoizedHashCode;
318 }
319 int hash = 41;
320 hash = (19 * hash) + getDescriptorForType().hashCode();
321 if (hasEffectiveUser()) {
322 hash = (37 * hash) + EFFECTIVEUSER_FIELD_NUMBER;
323 hash = (53 * hash) + getEffectiveUser().hashCode();
324 }
325 if (hasRealUser()) {
326 hash = (37 * hash) + REALUSER_FIELD_NUMBER;
327 hash = (53 * hash) + getRealUser().hashCode();
328 }
329 hash = (29 * hash) + getUnknownFields().hashCode();
330 memoizedHashCode = hash;
331 return hash;
332 }
333
334 public static org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto parseFrom(
335 com.google.protobuf.ByteString data)
336 throws com.google.protobuf.InvalidProtocolBufferException {
337 return PARSER.parseFrom(data);
338 }
339 public static org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto parseFrom(
340 com.google.protobuf.ByteString data,
341 com.google.protobuf.ExtensionRegistryLite extensionRegistry)
342 throws com.google.protobuf.InvalidProtocolBufferException {
343 return PARSER.parseFrom(data, extensionRegistry);
344 }
345 public static org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto parseFrom(byte[] data)
346 throws com.google.protobuf.InvalidProtocolBufferException {
347 return PARSER.parseFrom(data);
348 }
349 public static org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto parseFrom(
350 byte[] data,
351 com.google.protobuf.ExtensionRegistryLite extensionRegistry)
352 throws com.google.protobuf.InvalidProtocolBufferException {
353 return PARSER.parseFrom(data, extensionRegistry);
354 }
355 public static org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto parseFrom(java.io.InputStream input)
356 throws java.io.IOException {
357 return PARSER.parseFrom(input);
358 }
359 public static org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto parseFrom(
360 java.io.InputStream input,
361 com.google.protobuf.ExtensionRegistryLite extensionRegistry)
362 throws java.io.IOException {
363 return PARSER.parseFrom(input, extensionRegistry);
364 }
365 public static org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto parseDelimitedFrom(java.io.InputStream input)
366 throws java.io.IOException {
367 return PARSER.parseDelimitedFrom(input);
368 }
369 public static org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto parseDelimitedFrom(
370 java.io.InputStream input,
371 com.google.protobuf.ExtensionRegistryLite extensionRegistry)
372 throws java.io.IOException {
373 return PARSER.parseDelimitedFrom(input, extensionRegistry);
374 }
375 public static org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto parseFrom(
376 com.google.protobuf.CodedInputStream input)
377 throws java.io.IOException {
378 return PARSER.parseFrom(input);
379 }
380 public static org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto parseFrom(
381 com.google.protobuf.CodedInputStream input,
382 com.google.protobuf.ExtensionRegistryLite extensionRegistry)
383 throws java.io.IOException {
384 return PARSER.parseFrom(input, extensionRegistry);
385 }
386
387 public static Builder newBuilder() { return Builder.create(); }
388 public Builder newBuilderForType() { return newBuilder(); }
389 public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto prototype) {
390 return newBuilder().mergeFrom(prototype);
391 }
392 public Builder toBuilder() { return newBuilder(this); }
393
394 @java.lang.Override
395 protected Builder newBuilderForType(
396 com.google.protobuf.GeneratedMessage.BuilderParent parent) {
397 Builder builder = new Builder(parent);
398 return builder;
399 }
400 /**
401 * Protobuf type {@code hadoop.common.UserInformationProto}
402 *
403 * <pre>
404 **
405 * Spec for UserInformationProto is specified in ProtoUtil#makeIpcConnectionContext
406 * </pre>
407 */
408 public static final class Builder extends
409 com.google.protobuf.GeneratedMessage.Builder<Builder>
410 implements org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProtoOrBuilder {
411 public static final com.google.protobuf.Descriptors.Descriptor
412 getDescriptor() {
413 return org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.internal_static_hadoop_common_UserInformationProto_descriptor;
414 }
415
416 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
417 internalGetFieldAccessorTable() {
418 return org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.internal_static_hadoop_common_UserInformationProto_fieldAccessorTable
419 .ensureFieldAccessorsInitialized(
420 org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto.class, org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto.Builder.class);
421 }
422
423 // Construct using org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto.newBuilder()
424 private Builder() {
425 maybeForceBuilderInitialization();
426 }
427
428 private Builder(
429 com.google.protobuf.GeneratedMessage.BuilderParent parent) {
430 super(parent);
431 maybeForceBuilderInitialization();
432 }
433 private void maybeForceBuilderInitialization() {
434 if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
435 }
436 }
437 private static Builder create() {
438 return new Builder();
439 }
440
441 public Builder clear() {
442 super.clear();
443 effectiveUser_ = "";
444 bitField0_ = (bitField0_ & ~0x00000001);
445 realUser_ = "";
446 bitField0_ = (bitField0_ & ~0x00000002);
447 return this;
448 }
449
450 public Builder clone() {
451 return create().mergeFrom(buildPartial());
452 }
453
454 public com.google.protobuf.Descriptors.Descriptor
455 getDescriptorForType() {
456 return org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.internal_static_hadoop_common_UserInformationProto_descriptor;
457 }
458
459 public org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto getDefaultInstanceForType() {
460 return org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto.getDefaultInstance();
461 }
462
463 public org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto build() {
464 org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto result = buildPartial();
465 if (!result.isInitialized()) {
466 throw newUninitializedMessageException(result);
467 }
468 return result;
469 }
470
471 public org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto buildPartial() {
472 org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto result = new org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto(this);
473 int from_bitField0_ = bitField0_;
474 int to_bitField0_ = 0;
475 if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
476 to_bitField0_ |= 0x00000001;
477 }
478 result.effectiveUser_ = effectiveUser_;
479 if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
480 to_bitField0_ |= 0x00000002;
481 }
482 result.realUser_ = realUser_;
483 result.bitField0_ = to_bitField0_;
484 onBuilt();
485 return result;
486 }
487
488 public Builder mergeFrom(com.google.protobuf.Message other) {
489 if (other instanceof org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto) {
490 return mergeFrom((org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto)other);
491 } else {
492 super.mergeFrom(other);
493 return this;
494 }
495 }
496
497 public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto other) {
498 if (other == org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto.getDefaultInstance()) return this;
499 if (other.hasEffectiveUser()) {
500 bitField0_ |= 0x00000001;
501 effectiveUser_ = other.effectiveUser_;
502 onChanged();
503 }
504 if (other.hasRealUser()) {
505 bitField0_ |= 0x00000002;
506 realUser_ = other.realUser_;
507 onChanged();
508 }
509 this.mergeUnknownFields(other.getUnknownFields());
510 return this;
511 }
512
513 public final boolean isInitialized() {
514 return true;
515 }
516
517 public Builder mergeFrom(
518 com.google.protobuf.CodedInputStream input,
519 com.google.protobuf.ExtensionRegistryLite extensionRegistry)
520 throws java.io.IOException {
521 org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto parsedMessage = null;
522 try {
523 parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
524 } catch (com.google.protobuf.InvalidProtocolBufferException e) {
525 parsedMessage = (org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto) e.getUnfinishedMessage();
526 throw e;
527 } finally {
528 if (parsedMessage != null) {
529 mergeFrom(parsedMessage);
530 }
531 }
532 return this;
533 }
534 private int bitField0_;
535
536 // optional string effectiveUser = 1;
537 private java.lang.Object effectiveUser_ = "";
538 /**
539 * <code>optional string effectiveUser = 1;</code>
540 */
541 public boolean hasEffectiveUser() {
542 return ((bitField0_ & 0x00000001) == 0x00000001);
543 }
544 /**
545 * <code>optional string effectiveUser = 1;</code>
546 */
547 public java.lang.String getEffectiveUser() {
548 java.lang.Object ref = effectiveUser_;
549 if (!(ref instanceof java.lang.String)) {
550 java.lang.String s = ((com.google.protobuf.ByteString) ref)
551 .toStringUtf8();
552 effectiveUser_ = s;
553 return s;
554 } else {
555 return (java.lang.String) ref;
556 }
557 }
558 /**
559 * <code>optional string effectiveUser = 1;</code>
560 */
561 public com.google.protobuf.ByteString
562 getEffectiveUserBytes() {
563 java.lang.Object ref = effectiveUser_;
564 if (ref instanceof String) {
565 com.google.protobuf.ByteString b =
566 com.google.protobuf.ByteString.copyFromUtf8(
567 (java.lang.String) ref);
568 effectiveUser_ = b;
569 return b;
570 } else {
571 return (com.google.protobuf.ByteString) ref;
572 }
573 }
574 /**
575 * <code>optional string effectiveUser = 1;</code>
576 */
577 public Builder setEffectiveUser(
578 java.lang.String value) {
579 if (value == null) {
580 throw new NullPointerException();
581 }
582 bitField0_ |= 0x00000001;
583 effectiveUser_ = value;
584 onChanged();
585 return this;
586 }
587 /**
588 * <code>optional string effectiveUser = 1;</code>
589 */
590 public Builder clearEffectiveUser() {
591 bitField0_ = (bitField0_ & ~0x00000001);
592 effectiveUser_ = getDefaultInstance().getEffectiveUser();
593 onChanged();
594 return this;
595 }
596 /**
597 * <code>optional string effectiveUser = 1;</code>
598 */
599 public Builder setEffectiveUserBytes(
600 com.google.protobuf.ByteString value) {
601 if (value == null) {
602 throw new NullPointerException();
603 }
604 bitField0_ |= 0x00000001;
605 effectiveUser_ = value;
606 onChanged();
607 return this;
608 }
609
610 // optional string realUser = 2;
611 private java.lang.Object realUser_ = "";
612 /**
613 * <code>optional string realUser = 2;</code>
614 */
615 public boolean hasRealUser() {
616 return ((bitField0_ & 0x00000002) == 0x00000002);
617 }
618 /**
619 * <code>optional string realUser = 2;</code>
620 */
621 public java.lang.String getRealUser() {
622 java.lang.Object ref = realUser_;
623 if (!(ref instanceof java.lang.String)) {
624 java.lang.String s = ((com.google.protobuf.ByteString) ref)
625 .toStringUtf8();
626 realUser_ = s;
627 return s;
628 } else {
629 return (java.lang.String) ref;
630 }
631 }
632 /**
633 * <code>optional string realUser = 2;</code>
634 */
635 public com.google.protobuf.ByteString
636 getRealUserBytes() {
637 java.lang.Object ref = realUser_;
638 if (ref instanceof String) {
639 com.google.protobuf.ByteString b =
640 com.google.protobuf.ByteString.copyFromUtf8(
641 (java.lang.String) ref);
642 realUser_ = b;
643 return b;
644 } else {
645 return (com.google.protobuf.ByteString) ref;
646 }
647 }
648 /**
649 * <code>optional string realUser = 2;</code>
650 */
651 public Builder setRealUser(
652 java.lang.String value) {
653 if (value == null) {
654 throw new NullPointerException();
655 }
656 bitField0_ |= 0x00000002;
657 realUser_ = value;
658 onChanged();
659 return this;
660 }
661 /**
662 * <code>optional string realUser = 2;</code>
663 */
664 public Builder clearRealUser() {
665 bitField0_ = (bitField0_ & ~0x00000002);
666 realUser_ = getDefaultInstance().getRealUser();
667 onChanged();
668 return this;
669 }
670 /**
671 * <code>optional string realUser = 2;</code>
672 */
673 public Builder setRealUserBytes(
674 com.google.protobuf.ByteString value) {
675 if (value == null) {
676 throw new NullPointerException();
677 }
678 bitField0_ |= 0x00000002;
679 realUser_ = value;
680 onChanged();
681 return this;
682 }
683
684 // @@protoc_insertion_point(builder_scope:hadoop.common.UserInformationProto)
685 }
686
687 static {
688 defaultInstance = new UserInformationProto(true);
689 defaultInstance.initFields();
690 }
691
692 // @@protoc_insertion_point(class_scope:hadoop.common.UserInformationProto)
693 }
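  // ---------------------------------------------------------------------------
  // Editorial usage sketch, NOT part of the protoc-generated output: a minimal
  // build/serialize/parse round trip through the UserInformationProto API
  // defined above. The user names are illustrative placeholders only.
  // ---------------------------------------------------------------------------
  private static UserInformationProto exampleUserInfoRoundTrip()
      throws com.google.protobuf.InvalidProtocolBufferException {
    UserInformationProto userInfo = UserInformationProto.newBuilder()
        .setEffectiveUser("alice")  // field 1: the identity requests execute as
        .setRealUser("bob")         // field 2: the authenticated identity
        .build();
    byte[] wire = userInfo.toByteArray();         // serializes via writeTo()
    return UserInformationProto.parseFrom(wire);  // re-parses via PARSER
  }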
694
695 public interface IpcConnectionContextProtoOrBuilder
696 extends com.google.protobuf.MessageOrBuilder {
697
698 // optional .hadoop.common.UserInformationProto userInfo = 2;
699 /**
700 * <code>optional .hadoop.common.UserInformationProto userInfo = 2;</code>
701 *
702 * <pre>
703 * UserInfo beyond what is determined as part of security handshake
704 * at connection time (kerberos, tokens etc).
705 * </pre>
706 */
707 boolean hasUserInfo();
708 /**
709 * <code>optional .hadoop.common.UserInformationProto userInfo = 2;</code>
710 *
711 * <pre>
712 * UserInfo beyond what is determined as part of security handshake
713 * at connection time (kerberos, tokens etc).
714 * </pre>
715 */
716 org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto getUserInfo();
717 /**
718 * <code>optional .hadoop.common.UserInformationProto userInfo = 2;</code>
719 *
720 * <pre>
721 * UserInfo beyond what is determined as part of security handshake
722 * at connection time (kerberos, tokens etc).
723 * </pre>
724 */
725 org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProtoOrBuilder getUserInfoOrBuilder();
726
727 // optional string protocol = 3;
728 /**
729 * <code>optional string protocol = 3;</code>
730 *
731 * <pre>
732 * Protocol name for next rpc layer.
733 * The client created a proxy with this protocol name
734 * </pre>
735 */
736 boolean hasProtocol();
737 /**
738 * <code>optional string protocol = 3;</code>
739 *
740 * <pre>
741 * Protocol name for next rpc layer.
742 * The client created a proxy with this protocol name
743 * </pre>
744 */
745 java.lang.String getProtocol();
746 /**
747 * <code>optional string protocol = 3;</code>
748 *
749 * <pre>
750 * Protocol name for next rpc layer.
751 * The client created a proxy with this protocol name
752 * </pre>
753 */
754 com.google.protobuf.ByteString
755 getProtocolBytes();
756 }
757 /**
758 * Protobuf type {@code hadoop.common.IpcConnectionContextProto}
759 *
760 * <pre>
761 **
762 * The connection context is sent as part of the connection establishment.
763 * It establishes the context for ALL Rpc calls within the connection.
764 * </pre>
765 */
766 public static final class IpcConnectionContextProto extends
767 com.google.protobuf.GeneratedMessage
768 implements IpcConnectionContextProtoOrBuilder {
769 // Use IpcConnectionContextProto.newBuilder() to construct.
770 private IpcConnectionContextProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
771 super(builder);
772 this.unknownFields = builder.getUnknownFields();
773 }
774 private IpcConnectionContextProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
775
776 private static final IpcConnectionContextProto defaultInstance;
777 public static IpcConnectionContextProto getDefaultInstance() {
778 return defaultInstance;
779 }
780
781 public IpcConnectionContextProto getDefaultInstanceForType() {
782 return defaultInstance;
783 }
784
785 private final com.google.protobuf.UnknownFieldSet unknownFields;
786 @java.lang.Override
787 public final com.google.protobuf.UnknownFieldSet
788 getUnknownFields() {
789 return this.unknownFields;
790 }
791 private IpcConnectionContextProto(
792 com.google.protobuf.CodedInputStream input,
793 com.google.protobuf.ExtensionRegistryLite extensionRegistry)
794 throws com.google.protobuf.InvalidProtocolBufferException {
795 initFields();
796 int mutable_bitField0_ = 0;
797 com.google.protobuf.UnknownFieldSet.Builder unknownFields =
798 com.google.protobuf.UnknownFieldSet.newBuilder();
799 try {
800 boolean done = false;
801 while (!done) {
802 int tag = input.readTag();
803 switch (tag) {
804 case 0:
805 done = true;
806 break;
807 default: {
808 if (!parseUnknownField(input, unknownFields,
809 extensionRegistry, tag)) {
810 done = true;
811 }
812 break;
813 }
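            // Editorial note: tag = (field number << 3) | wire type.
            // 18 = field 2 (userInfo, embedded message, length-delimited);
            // 26 = field 3 (protocol, string, length-delimited).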
814 case 18: {
815 org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto.Builder subBuilder = null;
816 if (((bitField0_ & 0x00000001) == 0x00000001)) {
817 subBuilder = userInfo_.toBuilder();
818 }
819 userInfo_ = input.readMessage(org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto.PARSER, extensionRegistry);
820 if (subBuilder != null) {
821 subBuilder.mergeFrom(userInfo_);
822 userInfo_ = subBuilder.buildPartial();
823 }
824 bitField0_ |= 0x00000001;
825 break;
826 }
827 case 26: {
828 bitField0_ |= 0x00000002;
829 protocol_ = input.readBytes();
830 break;
831 }
832 }
833 }
834 } catch (com.google.protobuf.InvalidProtocolBufferException e) {
835 throw e.setUnfinishedMessage(this);
836 } catch (java.io.IOException e) {
837 throw new com.google.protobuf.InvalidProtocolBufferException(
838 e.getMessage()).setUnfinishedMessage(this);
839 } finally {
840 this.unknownFields = unknownFields.build();
841 makeExtensionsImmutable();
842 }
843 }
844 public static final com.google.protobuf.Descriptors.Descriptor
845 getDescriptor() {
846 return org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.internal_static_hadoop_common_IpcConnectionContextProto_descriptor;
847 }
848
849 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
850 internalGetFieldAccessorTable() {
851 return org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.internal_static_hadoop_common_IpcConnectionContextProto_fieldAccessorTable
852 .ensureFieldAccessorsInitialized(
853 org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.IpcConnectionContextProto.class, org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.IpcConnectionContextProto.Builder.class);
854 }
855
856 public static com.google.protobuf.Parser<IpcConnectionContextProto> PARSER =
857 new com.google.protobuf.AbstractParser<IpcConnectionContextProto>() {
858 public IpcConnectionContextProto parsePartialFrom(
859 com.google.protobuf.CodedInputStream input,
860 com.google.protobuf.ExtensionRegistryLite extensionRegistry)
861 throws com.google.protobuf.InvalidProtocolBufferException {
862 return new IpcConnectionContextProto(input, extensionRegistry);
863 }
864 };
865
866 @java.lang.Override
867 public com.google.protobuf.Parser<IpcConnectionContextProto> getParserForType() {
868 return PARSER;
869 }
870
871 private int bitField0_;
872 // optional .hadoop.common.UserInformationProto userInfo = 2;
873 public static final int USERINFO_FIELD_NUMBER = 2;
874 private org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto userInfo_;
875 /**
876 * <code>optional .hadoop.common.UserInformationProto userInfo = 2;</code>
877 *
878 * <pre>
879 * UserInfo beyond what is determined as part of security handshake
880 * at connection time (kerberos, tokens etc).
881 * </pre>
882 */
883 public boolean hasUserInfo() {
884 return ((bitField0_ & 0x00000001) == 0x00000001);
885 }
886 /**
887 * <code>optional .hadoop.common.UserInformationProto userInfo = 2;</code>
888 *
889 * <pre>
890 * UserInfo beyond what is determined as part of security handshake
891 * at connection time (kerberos, tokens etc).
892 * </pre>
893 */
894 public org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto getUserInfo() {
895 return userInfo_;
896 }
897 /**
898 * <code>optional .hadoop.common.UserInformationProto userInfo = 2;</code>
899 *
900 * <pre>
901 * UserInfo beyond what is determined as part of security handshake
902 * at connection time (kerberos, tokens etc).
903 * </pre>
904 */
905 public org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProtoOrBuilder getUserInfoOrBuilder() {
906 return userInfo_;
907 }
908
909 // optional string protocol = 3;
910 public static final int PROTOCOL_FIELD_NUMBER = 3;
911 private java.lang.Object protocol_;
912 /**
913 * <code>optional string protocol = 3;</code>
914 *
915 * <pre>
916 * Protocol name for next rpc layer.
917 * The client created a proxy with this protocol name
918 * </pre>
919 */
920 public boolean hasProtocol() {
921 return ((bitField0_ & 0x00000002) == 0x00000002);
922 }
923 /**
924 * <code>optional string protocol = 3;</code>
925 *
926 * <pre>
927 * Protocol name for next rpc layer.
928 * The client created a proxy with this protocol name
929 * </pre>
930 */
931 public java.lang.String getProtocol() {
932 java.lang.Object ref = protocol_;
933 if (ref instanceof java.lang.String) {
934 return (java.lang.String) ref;
935 } else {
936 com.google.protobuf.ByteString bs =
937 (com.google.protobuf.ByteString) ref;
938 java.lang.String s = bs.toStringUtf8();
939 if (bs.isValidUtf8()) {
940 protocol_ = s;
941 }
942 return s;
943 }
944 }
945 /**
946 * <code>optional string protocol = 3;</code>
947 *
948 * <pre>
949 * Protocol name for next rpc layer.
950 * The client created a proxy with this protocol name
951 * </pre>
952 */
953 public com.google.protobuf.ByteString
954 getProtocolBytes() {
955 java.lang.Object ref = protocol_;
956 if (ref instanceof java.lang.String) {
957 com.google.protobuf.ByteString b =
958 com.google.protobuf.ByteString.copyFromUtf8(
959 (java.lang.String) ref);
960 protocol_ = b;
961 return b;
962 } else {
963 return (com.google.protobuf.ByteString) ref;
964 }
965 }
966
967 private void initFields() {
968 userInfo_ = org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto.getDefaultInstance();
969 protocol_ = "";
970 }
971 private byte memoizedIsInitialized = -1;
972 public final boolean isInitialized() {
973 byte isInitialized = memoizedIsInitialized;
974 if (isInitialized != -1) return isInitialized == 1;
975
976 memoizedIsInitialized = 1;
977 return true;
978 }
979
980 public void writeTo(com.google.protobuf.CodedOutputStream output)
981 throws java.io.IOException {
982 getSerializedSize();
983 if (((bitField0_ & 0x00000001) == 0x00000001)) {
984 output.writeMessage(2, userInfo_);
985 }
986 if (((bitField0_ & 0x00000002) == 0x00000002)) {
987 output.writeBytes(3, getProtocolBytes());
988 }
989 getUnknownFields().writeTo(output);
990 }
991
992 private int memoizedSerializedSize = -1;
993 public int getSerializedSize() {
994 int size = memoizedSerializedSize;
995 if (size != -1) return size;
996
997 size = 0;
998 if (((bitField0_ & 0x00000001) == 0x00000001)) {
999 size += com.google.protobuf.CodedOutputStream
1000 .computeMessageSize(2, userInfo_);
1001 }
1002 if (((bitField0_ & 0x00000002) == 0x00000002)) {
1003 size += com.google.protobuf.CodedOutputStream
1004 .computeBytesSize(3, getProtocolBytes());
1005 }
1006 size += getUnknownFields().getSerializedSize();
1007 memoizedSerializedSize = size;
1008 return size;
1009 }
1010
1011 private static final long serialVersionUID = 0L;
1012 @java.lang.Override
1013 protected java.lang.Object writeReplace()
1014 throws java.io.ObjectStreamException {
1015 return super.writeReplace();
1016 }
1017
1018 @java.lang.Override
1019 public boolean equals(final java.lang.Object obj) {
1020 if (obj == this) {
1021 return true;
1022 }
1023 if (!(obj instanceof org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.IpcConnectionContextProto)) {
1024 return super.equals(obj);
1025 }
1026 org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.IpcConnectionContextProto other = (org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.IpcConnectionContextProto) obj;
1027
1028 boolean result = true;
1029 result = result && (hasUserInfo() == other.hasUserInfo());
1030 if (hasUserInfo()) {
1031 result = result && getUserInfo()
1032 .equals(other.getUserInfo());
1033 }
1034 result = result && (hasProtocol() == other.hasProtocol());
1035 if (hasProtocol()) {
1036 result = result && getProtocol()
1037 .equals(other.getProtocol());
1038 }
1039 result = result &&
1040 getUnknownFields().equals(other.getUnknownFields());
1041 return result;
1042 }
1043
1044 private int memoizedHashCode = 0;
1045 @java.lang.Override
1046 public int hashCode() {
1047 if (memoizedHashCode != 0) {
1048 return memoizedHashCode;
1049 }
1050 int hash = 41;
1051 hash = (19 * hash) + getDescriptorForType().hashCode();
1052 if (hasUserInfo()) {
1053 hash = (37 * hash) + USERINFO_FIELD_NUMBER;
1054 hash = (53 * hash) + getUserInfo().hashCode();
1055 }
1056 if (hasProtocol()) {
1057 hash = (37 * hash) + PROTOCOL_FIELD_NUMBER;
1058 hash = (53 * hash) + getProtocol().hashCode();
1059 }
1060 hash = (29 * hash) + getUnknownFields().hashCode();
1061 memoizedHashCode = hash;
1062 return hash;
1063 }
1064
1065 public static org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.IpcConnectionContextProto parseFrom(
1066 com.google.protobuf.ByteString data)
1067 throws com.google.protobuf.InvalidProtocolBufferException {
1068 return PARSER.parseFrom(data);
1069 }
1070 public static org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.IpcConnectionContextProto parseFrom(
1071 com.google.protobuf.ByteString data,
1072 com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1073 throws com.google.protobuf.InvalidProtocolBufferException {
1074 return PARSER.parseFrom(data, extensionRegistry);
1075 }
1076 public static org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.IpcConnectionContextProto parseFrom(byte[] data)
1077 throws com.google.protobuf.InvalidProtocolBufferException {
1078 return PARSER.parseFrom(data);
1079 }
1080 public static org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.IpcConnectionContextProto parseFrom(
1081 byte[] data,
1082 com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1083 throws com.google.protobuf.InvalidProtocolBufferException {
1084 return PARSER.parseFrom(data, extensionRegistry);
1085 }
1086 public static org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.IpcConnectionContextProto parseFrom(java.io.InputStream input)
1087 throws java.io.IOException {
1088 return PARSER.parseFrom(input);
1089 }
1090 public static org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.IpcConnectionContextProto parseFrom(
1091 java.io.InputStream input,
1092 com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1093 throws java.io.IOException {
1094 return PARSER.parseFrom(input, extensionRegistry);
1095 }
1096 public static org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.IpcConnectionContextProto parseDelimitedFrom(java.io.InputStream input)
1097 throws java.io.IOException {
1098 return PARSER.parseDelimitedFrom(input);
1099 }
1100 public static org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.IpcConnectionContextProto parseDelimitedFrom(
1101 java.io.InputStream input,
1102 com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1103 throws java.io.IOException {
1104 return PARSER.parseDelimitedFrom(input, extensionRegistry);
1105 }
1106 public static org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.IpcConnectionContextProto parseFrom(
1107 com.google.protobuf.CodedInputStream input)
1108 throws java.io.IOException {
1109 return PARSER.parseFrom(input);
1110 }
1111 public static org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.IpcConnectionContextProto parseFrom(
1112 com.google.protobuf.CodedInputStream input,
1113 com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1114 throws java.io.IOException {
1115 return PARSER.parseFrom(input, extensionRegistry);
1116 }
1117
1118 public static Builder newBuilder() { return Builder.create(); }
1119 public Builder newBuilderForType() { return newBuilder(); }
1120 public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.IpcConnectionContextProto prototype) {
1121 return newBuilder().mergeFrom(prototype);
1122 }
1123 public Builder toBuilder() { return newBuilder(this); }
1124
1125 @java.lang.Override
1126 protected Builder newBuilderForType(
1127 com.google.protobuf.GeneratedMessage.BuilderParent parent) {
1128 Builder builder = new Builder(parent);
1129 return builder;
1130 }
1131 /**
1132 * Protobuf type {@code hadoop.common.IpcConnectionContextProto}
1133 *
1134 * <pre>
1135 **
1136 * The connection context is sent as part of the connection establishment.
1137 * It establishes the context for ALL Rpc calls within the connection.
1138 * </pre>
1139 */
1140 public static final class Builder extends
1141 com.google.protobuf.GeneratedMessage.Builder<Builder>
1142 implements org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.IpcConnectionContextProtoOrBuilder {
1143 public static final com.google.protobuf.Descriptors.Descriptor
1144 getDescriptor() {
1145 return org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.internal_static_hadoop_common_IpcConnectionContextProto_descriptor;
1146 }
1147
1148 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
1149 internalGetFieldAccessorTable() {
1150 return org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.internal_static_hadoop_common_IpcConnectionContextProto_fieldAccessorTable
1151 .ensureFieldAccessorsInitialized(
1152 org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.IpcConnectionContextProto.class, org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.IpcConnectionContextProto.Builder.class);
1153 }
1154
1155 // Construct using org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.IpcConnectionContextProto.newBuilder()
1156 private Builder() {
1157 maybeForceBuilderInitialization();
1158 }
1159
1160 private Builder(
1161 com.google.protobuf.GeneratedMessage.BuilderParent parent) {
1162 super(parent);
1163 maybeForceBuilderInitialization();
1164 }
1165 private void maybeForceBuilderInitialization() {
1166 if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
1167 getUserInfoFieldBuilder();
1168 }
1169 }
1170 private static Builder create() {
1171 return new Builder();
1172 }
1173
1174 public Builder clear() {
1175 super.clear();
1176 if (userInfoBuilder_ == null) {
1177 userInfo_ = org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto.getDefaultInstance();
1178 } else {
1179 userInfoBuilder_.clear();
1180 }
1181 bitField0_ = (bitField0_ & ~0x00000001);
1182 protocol_ = "";
1183 bitField0_ = (bitField0_ & ~0x00000002);
1184 return this;
1185 }
1186
1187 public Builder clone() {
1188 return create().mergeFrom(buildPartial());
1189 }
1190
1191 public com.google.protobuf.Descriptors.Descriptor
1192 getDescriptorForType() {
1193 return org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.internal_static_hadoop_common_IpcConnectionContextProto_descriptor;
1194 }
1195
1196 public org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.IpcConnectionContextProto getDefaultInstanceForType() {
1197 return org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.IpcConnectionContextProto.getDefaultInstance();
1198 }
1199
1200 public org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.IpcConnectionContextProto build() {
1201 org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.IpcConnectionContextProto result = buildPartial();
1202 if (!result.isInitialized()) {
1203 throw newUninitializedMessageException(result);
1204 }
1205 return result;
1206 }
1207
1208 public org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.IpcConnectionContextProto buildPartial() {
1209 org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.IpcConnectionContextProto result = new org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.IpcConnectionContextProto(this);
1210 int from_bitField0_ = bitField0_;
1211 int to_bitField0_ = 0;
1212 if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
1213 to_bitField0_ |= 0x00000001;
1214 }
1215 if (userInfoBuilder_ == null) {
1216 result.userInfo_ = userInfo_;
1217 } else {
1218 result.userInfo_ = userInfoBuilder_.build();
1219 }
1220 if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
1221 to_bitField0_ |= 0x00000002;
1222 }
1223 result.protocol_ = protocol_;
1224 result.bitField0_ = to_bitField0_;
1225 onBuilt();
1226 return result;
1227 }
1228
1229 public Builder mergeFrom(com.google.protobuf.Message other) {
1230 if (other instanceof org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.IpcConnectionContextProto) {
1231 return mergeFrom((org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.IpcConnectionContextProto)other);
1232 } else {
1233 super.mergeFrom(other);
1234 return this;
1235 }
1236 }
1237
1238 public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.IpcConnectionContextProto other) {
1239 if (other == org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.IpcConnectionContextProto.getDefaultInstance()) return this;
1240 if (other.hasUserInfo()) {
1241 mergeUserInfo(other.getUserInfo());
1242 }
1243 if (other.hasProtocol()) {
1244 bitField0_ |= 0x00000002;
1245 protocol_ = other.protocol_;
1246 onChanged();
1247 }
1248 this.mergeUnknownFields(other.getUnknownFields());
1249 return this;
1250 }
1251
1252 public final boolean isInitialized() {
1253 return true;
1254 }
1255
1256 public Builder mergeFrom(
1257 com.google.protobuf.CodedInputStream input,
1258 com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1259 throws java.io.IOException {
1260 org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.IpcConnectionContextProto parsedMessage = null;
1261 try {
1262 parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
1263 } catch (com.google.protobuf.InvalidProtocolBufferException e) {
1264 parsedMessage = (org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.IpcConnectionContextProto) e.getUnfinishedMessage();
1265 throw e;
1266 } finally {
1267 if (parsedMessage != null) {
1268 mergeFrom(parsedMessage);
1269 }
1270 }
1271 return this;
1272 }
1273 private int bitField0_;
1274
1275 // optional .hadoop.common.UserInformationProto userInfo = 2;
1276 private org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto userInfo_ = org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto.getDefaultInstance();
1277 private com.google.protobuf.SingleFieldBuilder<
1278 org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto, org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto.Builder, org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProtoOrBuilder> userInfoBuilder_;
1279 /**
1280 * <code>optional .hadoop.common.UserInformationProto userInfo = 2;</code>
1281 *
1282 * <pre>
1283 * UserInfo beyond what is determined as part of security handshake
1284 * at connection time (kerberos, tokens etc).
1285 * </pre>
1286 */
1287 public boolean hasUserInfo() {
1288 return ((bitField0_ & 0x00000001) == 0x00000001);
1289 }
1290 /**
1291 * <code>optional .hadoop.common.UserInformationProto userInfo = 2;</code>
1292 *
1293 * <pre>
1294 * UserInfo beyond what is determined as part of security handshake
1295 * at connection time (kerberos, tokens etc).
1296 * </pre>
1297 */
1298 public org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto getUserInfo() {
1299 if (userInfoBuilder_ == null) {
1300 return userInfo_;
1301 } else {
1302 return userInfoBuilder_.getMessage();
1303 }
1304 }
1305 /**
1306 * <code>optional .hadoop.common.UserInformationProto userInfo = 2;</code>
1307 *
1308 * <pre>
1309 * UserInfo beyond what is determined as part of security handshake
1310 * at connection time (kerberos, tokens etc).
1311 * </pre>
1312 */
1313 public Builder setUserInfo(org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto value) {
1314 if (userInfoBuilder_ == null) {
1315 if (value == null) {
1316 throw new NullPointerException();
1317 }
1318 userInfo_ = value;
1319 onChanged();
1320 } else {
1321 userInfoBuilder_.setMessage(value);
1322 }
1323 bitField0_ |= 0x00000001;
1324 return this;
1325 }
1326 /**
1327 * <code>optional .hadoop.common.UserInformationProto userInfo = 2;</code>
1328 *
1329 * <pre>
1330 * UserInfo beyond what is determined as part of security handshake
1331 * at connection time (kerberos, tokens etc).
1332 * </pre>
1333 */
1334 public Builder setUserInfo(
1335 org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto.Builder builderForValue) {
1336 if (userInfoBuilder_ == null) {
1337 userInfo_ = builderForValue.build();
1338 onChanged();
1339 } else {
1340 userInfoBuilder_.setMessage(builderForValue.build());
1341 }
1342 bitField0_ |= 0x00000001;
1343 return this;
1344 }
1345 /**
1346 * <code>optional .hadoop.common.UserInformationProto userInfo = 2;</code>
1347 *
1348 * <pre>
1349 * UserInfo beyond what is determined as part of security handshake
1350 * at connection time (kerberos, tokens etc).
1351 * </pre>
1352 */
1353 public Builder mergeUserInfo(org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto value) {
1354 if (userInfoBuilder_ == null) {
1355 if (((bitField0_ & 0x00000001) == 0x00000001) &&
1356 userInfo_ != org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto.getDefaultInstance()) {
1357 userInfo_ =
1358 org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto.newBuilder(userInfo_).mergeFrom(value).buildPartial();
1359 } else {
1360 userInfo_ = value;
1361 }
1362 onChanged();
1363 } else {
1364 userInfoBuilder_.mergeFrom(value);
1365 }
1366 bitField0_ |= 0x00000001;
1367 return this;
1368 }
1369 /**
1370 * <code>optional .hadoop.common.UserInformationProto userInfo = 2;</code>
1371 *
1372 * <pre>
1373 * UserInfo beyond what is determined as part of security handshake
1374 * at connection time (kerberos, tokens etc).
1375 * </pre>
1376 */
1377 public Builder clearUserInfo() {
1378 if (userInfoBuilder_ == null) {
1379 userInfo_ = org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto.getDefaultInstance();
1380 onChanged();
1381 } else {
1382 userInfoBuilder_.clear();
1383 }
1384 bitField0_ = (bitField0_ & ~0x00000001);
1385 return this;
1386 }
1387 /**
1388 * <code>optional .hadoop.common.UserInformationProto userInfo = 2;</code>
1389 *
1390 * <pre>
1391 * UserInfo beyond what is determined as part of security handshake
1392 * at connection time (kerberos, tokens etc).
1393 * </pre>
1394 */
1395 public org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto.Builder getUserInfoBuilder() {
1396 bitField0_ |= 0x00000001;
1397 onChanged();
1398 return getUserInfoFieldBuilder().getBuilder();
1399 }
1400 /**
1401 * <code>optional .hadoop.common.UserInformationProto userInfo = 2;</code>
1402 *
1403 * <pre>
1404 * UserInfo beyond what is determined as part of security handshake
1405 * at connection time (kerberos, tokens etc).
1406 * </pre>
1407 */
1408 public org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProtoOrBuilder getUserInfoOrBuilder() {
1409 if (userInfoBuilder_ != null) {
1410 return userInfoBuilder_.getMessageOrBuilder();
1411 } else {
1412 return userInfo_;
1413 }
1414 }
1415 /**
1416 * <code>optional .hadoop.common.UserInformationProto userInfo = 2;</code>
1417 *
1418 * <pre>
1419 * UserInfo beyond what is determined as part of security handshake
1420 * at connection time (kerberos, tokens etc).
1421 * </pre>
1422 */
1423 private com.google.protobuf.SingleFieldBuilder<
1424 org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto, org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto.Builder, org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProtoOrBuilder>
1425 getUserInfoFieldBuilder() {
1426 if (userInfoBuilder_ == null) {
1427 userInfoBuilder_ = new com.google.protobuf.SingleFieldBuilder<
1428 org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto, org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto.Builder, org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProtoOrBuilder>(
1429 userInfo_,
1430 getParentForChildren(),
1431 isClean());
1432 userInfo_ = null;
1433 }
1434 return userInfoBuilder_;
1435 }
1436
1437 // optional string protocol = 3;
1438 private java.lang.Object protocol_ = "";
1439 /**
1440 * <code>optional string protocol = 3;</code>
1441 *
1442 * <pre>
1443 * Protocol name for next rpc layer.
1444 * The client created a proxy with this protocol name
1445 * </pre>
1446 */
1447 public boolean hasProtocol() {
1448 return ((bitField0_ & 0x00000002) == 0x00000002);
1449 }
1450 /**
1451 * <code>optional string protocol = 3;</code>
1452 *
1453 * <pre>
1454 * Protocol name for next rpc layer.
1455 * The client created a proxy with this protocol name
1456 * </pre>
1457 */
1458 public java.lang.String getProtocol() {
1459 java.lang.Object ref = protocol_;
1460 if (!(ref instanceof java.lang.String)) {
1461 java.lang.String s = ((com.google.protobuf.ByteString) ref)
1462 .toStringUtf8();
1463 protocol_ = s;
1464 return s;
1465 } else {
1466 return (java.lang.String) ref;
1467 }
1468 }
1469 /**
1470 * <code>optional string protocol = 3;</code>
1471 *
1472 * <pre>
1473 * Protocol name for next rpc layer.
1474 * The client created a proxy with this protocol name
1475 * </pre>
1476 */
1477 public com.google.protobuf.ByteString
1478 getProtocolBytes() {
1479 java.lang.Object ref = protocol_;
1480 if (ref instanceof String) {
1481 com.google.protobuf.ByteString b =
1482 com.google.protobuf.ByteString.copyFromUtf8(
1483 (java.lang.String) ref);
1484 protocol_ = b;
1485 return b;
1486 } else {
1487 return (com.google.protobuf.ByteString) ref;
1488 }
1489 }
1490 /**
1491 * <code>optional string protocol = 3;</code>
1492 *
1493 * <pre>
1494 * Protocol name for next rpc layer.
1495 * The client created a proxy with this protocol name
1496 * </pre>
1497 */
1498 public Builder setProtocol(
1499 java.lang.String value) {
1500 if (value == null) {
1501 throw new NullPointerException();
1502 }
1503 bitField0_ |= 0x00000002;
1504 protocol_ = value;
1505 onChanged();
1506 return this;
1507 }
1508 /**
1509 * <code>optional string protocol = 3;</code>
1510 *
1511 * <pre>
1512 * Protocol name for next rpc layer.
1513 * The client created a proxy with this protocol name
1514 * </pre>
1515 */
1516 public Builder clearProtocol() {
1517 bitField0_ = (bitField0_ & ~0x00000002);
1518 protocol_ = getDefaultInstance().getProtocol();
1519 onChanged();
1520 return this;
1521 }
1522 /**
1523 * <code>optional string protocol = 3;</code>
1524 *
1525 * <pre>
1526 * Protocol name for next rpc layer.
1527 * The client created a proxy with this protocol name
1528 * </pre>
1529 */
1530 public Builder setProtocolBytes(
1531 com.google.protobuf.ByteString value) {
1532 if (value == null) {
1533 throw new NullPointerException();
1534 }
1535 bitField0_ |= 0x00000002;
1536 protocol_ = value;
1537 onChanged();
1538 return this;
1539 }
1540
1541 // @@protoc_insertion_point(builder_scope:hadoop.common.IpcConnectionContextProto)
1542 }
1543
1544 static {
1545 defaultInstance = new IpcConnectionContextProto(true);
1546 defaultInstance.initFields();
1547 }
1548
1549 // @@protoc_insertion_point(class_scope:hadoop.common.IpcConnectionContextProto)
1550 }
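  // ---------------------------------------------------------------------------
  // Editorial usage sketch, NOT part of the protoc-generated output: builds a
  // connection context with a nested UserInformationProto and a protocol name,
  // as a client would before sending it at connection establishment. Both
  // literal values are illustrative placeholders only.
  // ---------------------------------------------------------------------------
  private static IpcConnectionContextProto exampleConnectionContext() {
    UserInformationProto userInfo = UserInformationProto.newBuilder()
        .setEffectiveUser("alice")
        .build();
    return IpcConnectionContextProto.newBuilder()
        .setUserInfo(userInfo)                    // field 2: user info beyond the security handshake
        .setProtocol("org.example.SomeProtocol")  // field 3: next-layer protocol name
        .build();
  }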
1551
1552 private static com.google.protobuf.Descriptors.Descriptor
1553 internal_static_hadoop_common_UserInformationProto_descriptor;
1554 private static
1555 com.google.protobuf.GeneratedMessage.FieldAccessorTable
1556 internal_static_hadoop_common_UserInformationProto_fieldAccessorTable;
1557 private static com.google.protobuf.Descriptors.Descriptor
1558 internal_static_hadoop_common_IpcConnectionContextProto_descriptor;
1559 private static
1560 com.google.protobuf.GeneratedMessage.FieldAccessorTable
1561 internal_static_hadoop_common_IpcConnectionContextProto_fieldAccessorTable;
1562
1563 public static com.google.protobuf.Descriptors.FileDescriptor
1564 getDescriptor() {
1565 return descriptor;
1566 }
1567 private static com.google.protobuf.Descriptors.FileDescriptor
1568 descriptor;
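  // Editorial note: descriptorData below is the serialized FileDescriptorProto
  // for IpcConnectionContext.proto, embedded as a string literal. The static
  // block decodes it at class-load time and wires up the message descriptors
  // and field accessor tables referenced by the generated classes above.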
1569 static {
1570 java.lang.String[] descriptorData = {
1571 "\n\032IpcConnectionContext.proto\022\rhadoop.com" +
1572 "mon\"?\n\024UserInformationProto\022\025\n\reffective" +
1573 "User\030\001 \001(\t\022\020\n\010realUser\030\002 \001(\t\"d\n\031IpcConne" +
1574 "ctionContextProto\0225\n\010userInfo\030\002 \001(\0132#.ha" +
1575 "doop.common.UserInformationProto\022\020\n\010prot" +
1576 "ocol\030\003 \001(\tB?\n\036org.apache.hadoop.ipc.prot" +
1577 "obufB\032IpcConnectionContextProtos\240\001\001"
1578 };
1579 com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
1580 new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
1581 public com.google.protobuf.ExtensionRegistry assignDescriptors(
1582 com.google.protobuf.Descriptors.FileDescriptor root) {
1583 descriptor = root;
1584 internal_static_hadoop_common_UserInformationProto_descriptor =
1585 getDescriptor().getMessageTypes().get(0);
1586 internal_static_hadoop_common_UserInformationProto_fieldAccessorTable = new
1587 com.google.protobuf.GeneratedMessage.FieldAccessorTable(
1588 internal_static_hadoop_common_UserInformationProto_descriptor,
1589 new java.lang.String[] { "EffectiveUser", "RealUser", });
1590 internal_static_hadoop_common_IpcConnectionContextProto_descriptor =
1591 getDescriptor().getMessageTypes().get(1);
1592 internal_static_hadoop_common_IpcConnectionContextProto_fieldAccessorTable = new
1593 com.google.protobuf.GeneratedMessage.FieldAccessorTable(
1594 internal_static_hadoop_common_IpcConnectionContextProto_descriptor,
1595 new java.lang.String[] { "UserInfo", "Protocol", });
1596 return null;
1597 }
1598 };
1599 com.google.protobuf.Descriptors.FileDescriptor
1600 .internalBuildGeneratedFileFrom(descriptorData,
1601 new com.google.protobuf.Descriptors.FileDescriptor[] {
1602 }, assigner);
1603 }
1604
1605 // @@protoc_insertion_point(outer_class_scope)
1606 }