// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: yarn_security_token.proto
// Protobuf Java Version: 3.25.5
package org.apache.hadoop.yarn.proto;
public final class YarnSecurityTokenProtos {
private YarnSecurityTokenProtos() {}
public static void registerAllExtensions(
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite registry) {
}
public static void registerAllExtensions(
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistry registry) {
registerAllExtensions(
(org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite) registry);
}
public interface NMTokenIdentifierProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.NMTokenIdentifierProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* <code>optional .hadoop.yarn.ApplicationAttemptIdProto appAttemptId = 1;</code>
* @return Whether the appAttemptId field is set.
*/
boolean hasAppAttemptId();
/**
* <code>optional .hadoop.yarn.ApplicationAttemptIdProto appAttemptId = 1;</code>
* @return The appAttemptId.
*/
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto getAppAttemptId();
/**
* <code>optional .hadoop.yarn.ApplicationAttemptIdProto appAttemptId = 1;</code>
*/
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder getAppAttemptIdOrBuilder();
/**
* <code>optional .hadoop.yarn.NodeIdProto nodeId = 2;</code>
* @return Whether the nodeId field is set.
*/
boolean hasNodeId();
/**
* <code>optional .hadoop.yarn.NodeIdProto nodeId = 2;</code>
* @return The nodeId.
*/
org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto getNodeId();
/**
* <code>optional .hadoop.yarn.NodeIdProto nodeId = 2;</code>
*/
org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder getNodeIdOrBuilder();
/**
* <code>optional string appSubmitter = 3;</code>
* @return Whether the appSubmitter field is set.
*/
boolean hasAppSubmitter();
/**
* <code>optional string appSubmitter = 3;</code>
* @return The appSubmitter.
*/
java.lang.String getAppSubmitter();
/**
* <code>optional string appSubmitter = 3;</code>
* @return The bytes for appSubmitter.
*/
org.apache.hadoop.thirdparty.protobuf.ByteString
getAppSubmitterBytes();
/**
* <code>optional int32 keyId = 4 [default = -1];</code>
* @return Whether the keyId field is set.
*/
boolean hasKeyId();
/**
* <code>optional int32 keyId = 4 [default = -1];</code>
* @return The keyId.
*/
int getKeyId();
}
/**
* Protobuf type {@code hadoop.yarn.NMTokenIdentifierProto}
*/
public static final class NMTokenIdentifierProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.NMTokenIdentifierProto)
NMTokenIdentifierProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use NMTokenIdentifierProto.newBuilder() to construct.
private NMTokenIdentifierProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private NMTokenIdentifierProto() {
appSubmitter_ = "";
keyId_ = -1;
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new NMTokenIdentifierProto();
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.internal_static_hadoop_yarn_NMTokenIdentifierProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.internal_static_hadoop_yarn_NMTokenIdentifierProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.NMTokenIdentifierProto.class, org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.NMTokenIdentifierProto.Builder.class);
}
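  // Presence bits for the optional fields: bit i of bitField0_ records whether
  // the corresponding field has been explicitly set (proto2 has-bit semantics).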
private int bitField0_;
public static final int APPATTEMPTID_FIELD_NUMBER = 1;
private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto appAttemptId_;
/**
* <code>optional .hadoop.yarn.ApplicationAttemptIdProto appAttemptId = 1;</code>
* @return Whether the appAttemptId field is set.
*/
@java.lang.Override
public boolean hasAppAttemptId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* <code>optional .hadoop.yarn.ApplicationAttemptIdProto appAttemptId = 1;</code>
* @return The appAttemptId.
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto getAppAttemptId() {
return appAttemptId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.getDefaultInstance() : appAttemptId_;
}
/**
* <code>optional .hadoop.yarn.ApplicationAttemptIdProto appAttemptId = 1;</code>
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder getAppAttemptIdOrBuilder() {
return appAttemptId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.getDefaultInstance() : appAttemptId_;
}
public static final int NODEID_FIELD_NUMBER = 2;
private org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto nodeId_;
/**
* <code>optional .hadoop.yarn.NodeIdProto nodeId = 2;</code>
* @return Whether the nodeId field is set.
*/
@java.lang.Override
public boolean hasNodeId() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* <code>optional .hadoop.yarn.NodeIdProto nodeId = 2;</code>
* @return The nodeId.
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto getNodeId() {
return nodeId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance() : nodeId_;
}
/**
* <code>optional .hadoop.yarn.NodeIdProto nodeId = 2;</code>
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder getNodeIdOrBuilder() {
return nodeId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance() : nodeId_;
}
public static final int APPSUBMITTER_FIELD_NUMBER = 3;
@SuppressWarnings("serial")
private volatile java.lang.Object appSubmitter_ = "";
/**
* <code>optional string appSubmitter = 3;</code>
* @return Whether the appSubmitter field is set.
*/
@java.lang.Override
public boolean hasAppSubmitter() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
* <code>optional string appSubmitter = 3;</code>
* @return The appSubmitter.
*/
@java.lang.Override
public java.lang.String getAppSubmitter() {
java.lang.Object ref = appSubmitter_;
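    // appSubmitter_ holds either a String or a ByteString; the String form is
    // decoded lazily and cached once the bytes are known to be valid UTF-8.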
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
appSubmitter_ = s;
}
return s;
}
}
/**
* <code>optional string appSubmitter = 3;</code>
* @return The bytes for appSubmitter.
*/
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.ByteString
getAppSubmitterBytes() {
java.lang.Object ref = appSubmitter_;
if (ref instanceof java.lang.String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
appSubmitter_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
public static final int KEYID_FIELD_NUMBER = 4;
private int keyId_ = -1;
/**
* <code>optional int32 keyId = 4 [default = -1];</code>
* @return Whether the keyId field is set.
*/
@java.lang.Override
public boolean hasKeyId() {
return ((bitField0_ & 0x00000008) != 0);
}
/**
* <code>optional int32 keyId = 4 [default = -1];</code>
* @return The keyId.
*/
@java.lang.Override
public int getKeyId() {
return keyId_;
}
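  // Memoized isInitialized() result: -1 = not yet computed, 0 = false, 1 = true.
  // Every field in this message is optional, so it is always initialized.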
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getAppAttemptId());
}
if (((bitField0_ & 0x00000002) != 0)) {
output.writeMessage(2, getNodeId());
}
if (((bitField0_ & 0x00000004) != 0)) {
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 3, appSubmitter_);
}
if (((bitField0_ & 0x00000008) != 0)) {
output.writeInt32(4, keyId_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(1, getAppAttemptId());
}
if (((bitField0_ & 0x00000002) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(2, getNodeId());
}
if (((bitField0_ & 0x00000004) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(3, appSubmitter_);
}
if (((bitField0_ & 0x00000008) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeInt32Size(4, keyId_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.NMTokenIdentifierProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.NMTokenIdentifierProto other = (org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.NMTokenIdentifierProto) obj;
if (hasAppAttemptId() != other.hasAppAttemptId()) return false;
if (hasAppAttemptId()) {
if (!getAppAttemptId()
.equals(other.getAppAttemptId())) return false;
}
if (hasNodeId() != other.hasNodeId()) return false;
if (hasNodeId()) {
if (!getNodeId()
.equals(other.getNodeId())) return false;
}
if (hasAppSubmitter() != other.hasAppSubmitter()) return false;
if (hasAppSubmitter()) {
if (!getAppSubmitter()
.equals(other.getAppSubmitter())) return false;
}
if (hasKeyId() != other.hasKeyId()) return false;
if (hasKeyId()) {
if (getKeyId()
!= other.getKeyId()) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasAppAttemptId()) {
hash = (37 * hash) + APPATTEMPTID_FIELD_NUMBER;
hash = (53 * hash) + getAppAttemptId().hashCode();
}
if (hasNodeId()) {
hash = (37 * hash) + NODEID_FIELD_NUMBER;
hash = (53 * hash) + getNodeId().hashCode();
}
if (hasAppSubmitter()) {
hash = (37 * hash) + APPSUBMITTER_FIELD_NUMBER;
hash = (53 * hash) + getAppSubmitter().hashCode();
}
if (hasKeyId()) {
hash = (37 * hash) + KEYID_FIELD_NUMBER;
hash = (53 * hash) + getKeyId();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.NMTokenIdentifierProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.NMTokenIdentifierProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.NMTokenIdentifierProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.NMTokenIdentifierProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.NMTokenIdentifierProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.NMTokenIdentifierProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.NMTokenIdentifierProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.NMTokenIdentifierProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.NMTokenIdentifierProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.NMTokenIdentifierProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.NMTokenIdentifierProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.NMTokenIdentifierProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.NMTokenIdentifierProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.NMTokenIdentifierProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.NMTokenIdentifierProto)
org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.NMTokenIdentifierProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.internal_static_hadoop_yarn_NMTokenIdentifierProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.internal_static_hadoop_yarn_NMTokenIdentifierProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.NMTokenIdentifierProto.class, org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.NMTokenIdentifierProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.NMTokenIdentifierProto.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getAppAttemptIdFieldBuilder();
getNodeIdFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
appAttemptId_ = null;
if (appAttemptIdBuilder_ != null) {
appAttemptIdBuilder_.dispose();
appAttemptIdBuilder_ = null;
}
nodeId_ = null;
if (nodeIdBuilder_ != null) {
nodeIdBuilder_.dispose();
nodeIdBuilder_ = null;
}
appSubmitter_ = "";
keyId_ = -1;
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.internal_static_hadoop_yarn_NMTokenIdentifierProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.NMTokenIdentifierProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.NMTokenIdentifierProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.NMTokenIdentifierProto build() {
org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.NMTokenIdentifierProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.NMTokenIdentifierProto buildPartial() {
org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.NMTokenIdentifierProto result = new org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.NMTokenIdentifierProto(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
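      // Copies the set fields from the builder into the result message and
      // accumulates the corresponding presence bits onto result.bitField0_.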
private void buildPartial0(org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.NMTokenIdentifierProto result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.appAttemptId_ = appAttemptIdBuilder_ == null
? appAttemptId_
: appAttemptIdBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.nodeId_ = nodeIdBuilder_ == null
? nodeId_
: nodeIdBuilder_.build();
to_bitField0_ |= 0x00000002;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.appSubmitter_ = appSubmitter_;
to_bitField0_ |= 0x00000004;
}
if (((from_bitField0_ & 0x00000008) != 0)) {
result.keyId_ = keyId_;
to_bitField0_ |= 0x00000008;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.NMTokenIdentifierProto) {
return mergeFrom((org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.NMTokenIdentifierProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.NMTokenIdentifierProto other) {
if (other == org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.NMTokenIdentifierProto.getDefaultInstance()) return this;
if (other.hasAppAttemptId()) {
mergeAppAttemptId(other.getAppAttemptId());
}
if (other.hasNodeId()) {
mergeNodeId(other.getNodeId());
}
if (other.hasAppSubmitter()) {
appSubmitter_ = other.appSubmitter_;
bitField0_ |= 0x00000004;
onChanged();
}
if (other.hasKeyId()) {
setKeyId(other.getKeyId());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
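            // Each case value is the wire tag, (field number << 3) | wire type:
            // e.g. 10 = field 1 length-delimited, 32 = field 4 varint.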
switch (tag) {
case 0:
done = true;
break;
case 10: {
input.readMessage(
getAppAttemptIdFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
case 18: {
input.readMessage(
getNodeIdFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000002;
break;
} // case 18
case 26: {
appSubmitter_ = input.readBytes();
bitField0_ |= 0x00000004;
break;
} // case 26
case 32: {
keyId_ = input.readInt32();
bitField0_ |= 0x00000008;
break;
} // case 32
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto appAttemptId_;
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder> appAttemptIdBuilder_;
/**
* <code>optional .hadoop.yarn.ApplicationAttemptIdProto appAttemptId = 1;</code>
* @return Whether the appAttemptId field is set.
*/
public boolean hasAppAttemptId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* <code>optional .hadoop.yarn.ApplicationAttemptIdProto appAttemptId = 1;</code>
* @return The appAttemptId.
*/
public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto getAppAttemptId() {
if (appAttemptIdBuilder_ == null) {
return appAttemptId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.getDefaultInstance() : appAttemptId_;
} else {
return appAttemptIdBuilder_.getMessage();
}
}
/**
* <code>optional .hadoop.yarn.ApplicationAttemptIdProto appAttemptId = 1;</code>
*/
public Builder setAppAttemptId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto value) {
if (appAttemptIdBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
appAttemptId_ = value;
} else {
appAttemptIdBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* <code>optional .hadoop.yarn.ApplicationAttemptIdProto appAttemptId = 1;</code>
*/
public Builder setAppAttemptId(
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder builderForValue) {
if (appAttemptIdBuilder_ == null) {
appAttemptId_ = builderForValue.build();
} else {
appAttemptIdBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* <code>optional .hadoop.yarn.ApplicationAttemptIdProto appAttemptId = 1;</code>
*/
public Builder mergeAppAttemptId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto value) {
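        // If a non-default appAttemptId is already set, merge the incoming
        // message field-wise; otherwise simply replace it with the new value.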
if (appAttemptIdBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0) &&
appAttemptId_ != null &&
appAttemptId_ != org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.getDefaultInstance()) {
getAppAttemptIdBuilder().mergeFrom(value);
} else {
appAttemptId_ = value;
}
} else {
appAttemptIdBuilder_.mergeFrom(value);
}
if (appAttemptId_ != null) {
bitField0_ |= 0x00000001;
onChanged();
}
return this;
}
/**
* <code>optional .hadoop.yarn.ApplicationAttemptIdProto appAttemptId = 1;</code>
*/
public Builder clearAppAttemptId() {
bitField0_ = (bitField0_ & ~0x00000001);
appAttemptId_ = null;
if (appAttemptIdBuilder_ != null) {
appAttemptIdBuilder_.dispose();
appAttemptIdBuilder_ = null;
}
onChanged();
return this;
}
/**
* <code>optional .hadoop.yarn.ApplicationAttemptIdProto appAttemptId = 1;</code>
*/
public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder getAppAttemptIdBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getAppAttemptIdFieldBuilder().getBuilder();
}
/**
* <code>optional .hadoop.yarn.ApplicationAttemptIdProto appAttemptId = 1;</code>
*/
public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder getAppAttemptIdOrBuilder() {
if (appAttemptIdBuilder_ != null) {
return appAttemptIdBuilder_.getMessageOrBuilder();
} else {
return appAttemptId_ == null ?
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.getDefaultInstance() : appAttemptId_;
}
}
/**
* <code>optional .hadoop.yarn.ApplicationAttemptIdProto appAttemptId = 1;</code>
*/
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder>
getAppAttemptIdFieldBuilder() {
if (appAttemptIdBuilder_ == null) {
appAttemptIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder>(
getAppAttemptId(),
getParentForChildren(),
isClean());
appAttemptId_ = null;
}
return appAttemptIdBuilder_;
}
private org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto nodeId_;
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder> nodeIdBuilder_;
/**
* <code>optional .hadoop.yarn.NodeIdProto nodeId = 2;</code>
* @return Whether the nodeId field is set.
*/
public boolean hasNodeId() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* <code>optional .hadoop.yarn.NodeIdProto nodeId = 2;</code>
* @return The nodeId.
*/
public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto getNodeId() {
if (nodeIdBuilder_ == null) {
return nodeId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance() : nodeId_;
} else {
return nodeIdBuilder_.getMessage();
}
}
/**
* <code>optional .hadoop.yarn.NodeIdProto nodeId = 2;</code>
*/
public Builder setNodeId(org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto value) {
if (nodeIdBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
nodeId_ = value;
} else {
nodeIdBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
* <code>optional .hadoop.yarn.NodeIdProto nodeId = 2;</code>
*/
public Builder setNodeId(
org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder builderForValue) {
if (nodeIdBuilder_ == null) {
nodeId_ = builderForValue.build();
} else {
nodeIdBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
* <code>optional .hadoop.yarn.NodeIdProto nodeId = 2;</code>
*/
public Builder mergeNodeId(org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto value) {
if (nodeIdBuilder_ == null) {
if (((bitField0_ & 0x00000002) != 0) &&
nodeId_ != null &&
nodeId_ != org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance()) {
getNodeIdBuilder().mergeFrom(value);
} else {
nodeId_ = value;
}
} else {
nodeIdBuilder_.mergeFrom(value);
}
if (nodeId_ != null) {
bitField0_ |= 0x00000002;
onChanged();
}
return this;
}
/**
* <code>optional .hadoop.yarn.NodeIdProto nodeId = 2;</code>
*/
public Builder clearNodeId() {
bitField0_ = (bitField0_ & ~0x00000002);
nodeId_ = null;
if (nodeIdBuilder_ != null) {
nodeIdBuilder_.dispose();
nodeIdBuilder_ = null;
}
onChanged();
return this;
}
/**
* <code>optional .hadoop.yarn.NodeIdProto nodeId = 2;</code>
*/
public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder getNodeIdBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getNodeIdFieldBuilder().getBuilder();
}
/**
* <code>optional .hadoop.yarn.NodeIdProto nodeId = 2;</code>
*/
public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder getNodeIdOrBuilder() {
if (nodeIdBuilder_ != null) {
return nodeIdBuilder_.getMessageOrBuilder();
} else {
return nodeId_ == null ?
org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance() : nodeId_;
}
}
/**
* <code>optional .hadoop.yarn.NodeIdProto nodeId = 2;</code>
*/
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder>
getNodeIdFieldBuilder() {
if (nodeIdBuilder_ == null) {
nodeIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder>(
getNodeId(),
getParentForChildren(),
isClean());
nodeId_ = null;
}
return nodeIdBuilder_;
}
private java.lang.Object appSubmitter_ = "";
/**
* <code>optional string appSubmitter = 3;</code>
* @return Whether the appSubmitter field is set.
*/
public boolean hasAppSubmitter() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
* <code>optional string appSubmitter = 3;</code>
* @return The appSubmitter.
*/
public java.lang.String getAppSubmitter() {
java.lang.Object ref = appSubmitter_;
if (!(ref instanceof java.lang.String)) {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
appSubmitter_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <code>optional string appSubmitter = 3;</code>
* @return The bytes for appSubmitter.
*/
public org.apache.hadoop.thirdparty.protobuf.ByteString
getAppSubmitterBytes() {
java.lang.Object ref = appSubmitter_;
if (ref instanceof String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
appSubmitter_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
/**
* <code>optional string appSubmitter = 3;</code>
* @param value The appSubmitter to set.
* @return This builder for chaining.
*/
public Builder setAppSubmitter(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
appSubmitter_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
* <code>optional string appSubmitter = 3;</code>
* @return This builder for chaining.
*/
public Builder clearAppSubmitter() {
appSubmitter_ = getDefaultInstance().getAppSubmitter();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
return this;
}
/**
* <code>optional string appSubmitter = 3;</code>
* @param value The bytes for appSubmitter to set.
* @return This builder for chaining.
*/
public Builder setAppSubmitterBytes(
org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
appSubmitter_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
private int keyId_ = -1;
/**
* <code>optional int32 keyId = 4 [default = -1];</code>
* @return Whether the keyId field is set.
*/
@java.lang.Override
public boolean hasKeyId() {
return ((bitField0_ & 0x00000008) != 0);
}
/**
* <code>optional int32 keyId = 4 [default = -1];</code>
* @return The keyId.
*/
@java.lang.Override
public int getKeyId() {
return keyId_;
}
/**
* <code>optional int32 keyId = 4 [default = -1];</code>
* @param value The keyId to set.
* @return This builder for chaining.
*/
public Builder setKeyId(int value) {
keyId_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
/**
* <code>optional int32 keyId = 4 [default = -1];</code>
* @return This builder for chaining.
*/
public Builder clearKeyId() {
bitField0_ = (bitField0_ & ~0x00000008);
keyId_ = -1;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.NMTokenIdentifierProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.NMTokenIdentifierProto)
private static final org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.NMTokenIdentifierProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.NMTokenIdentifierProto();
}
public static org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.NMTokenIdentifierProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
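  // Direct use of the public PARSER field is deprecated; callers should go
  // through parser() or the static parseFrom overloads above instead.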
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<NMTokenIdentifierProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<NMTokenIdentifierProto>() {
@java.lang.Override
public NMTokenIdentifierProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<NMTokenIdentifierProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<NMTokenIdentifierProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.NMTokenIdentifierProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
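  // Illustrative usage only (not part of the generated file): building,
  // serializing, and re-parsing an NMTokenIdentifierProto. The field values
  // below are hypothetical.
  //
  //   NMTokenIdentifierProto token = NMTokenIdentifierProto.newBuilder()
  //       .setAppSubmitter("alice")
  //       .setKeyId(42)
  //       .build();
  //   byte[] bytes = token.toByteArray();
  //   NMTokenIdentifierProto parsed = NMTokenIdentifierProto.parseFrom(bytes);
  //   assert parsed.getKeyId() == 42;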
public interface AMRMTokenIdentifierProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.AMRMTokenIdentifierProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* <code>optional .hadoop.yarn.ApplicationAttemptIdProto appAttemptId = 1;</code>
* @return Whether the appAttemptId field is set.
*/
boolean hasAppAttemptId();
/**
* <code>optional .hadoop.yarn.ApplicationAttemptIdProto appAttemptId = 1;</code>
* @return The appAttemptId.
*/
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto getAppAttemptId();
/**
* <code>optional .hadoop.yarn.ApplicationAttemptIdProto appAttemptId = 1;</code>
*/
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder getAppAttemptIdOrBuilder();
/**
* <code>optional int32 keyId = 2 [default = -1];</code>
* @return Whether the keyId field is set.
*/
boolean hasKeyId();
/**
* <code>optional int32 keyId = 2 [default = -1];</code>
* @return The keyId.
*/
int getKeyId();
}
/**
* Protobuf type {@code hadoop.yarn.AMRMTokenIdentifierProto}
*/
public static final class AMRMTokenIdentifierProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.AMRMTokenIdentifierProto)
AMRMTokenIdentifierProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use AMRMTokenIdentifierProto.newBuilder() to construct.
private AMRMTokenIdentifierProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private AMRMTokenIdentifierProto() {
keyId_ = -1;
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new AMRMTokenIdentifierProto();
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.internal_static_hadoop_yarn_AMRMTokenIdentifierProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.internal_static_hadoop_yarn_AMRMTokenIdentifierProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.AMRMTokenIdentifierProto.class, org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.AMRMTokenIdentifierProto.Builder.class);
}
private int bitField0_;
public static final int APPATTEMPTID_FIELD_NUMBER = 1;
private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto appAttemptId_;
/**
* <code>optional .hadoop.yarn.ApplicationAttemptIdProto appAttemptId = 1;</code>
* @return Whether the appAttemptId field is set.
*/
@java.lang.Override
public boolean hasAppAttemptId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* <code>optional .hadoop.yarn.ApplicationAttemptIdProto appAttemptId = 1;</code>
* @return The appAttemptId.
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto getAppAttemptId() {
return appAttemptId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.getDefaultInstance() : appAttemptId_;
}
/**
* <code>optional .hadoop.yarn.ApplicationAttemptIdProto appAttemptId = 1;</code>
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder getAppAttemptIdOrBuilder() {
return appAttemptId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.getDefaultInstance() : appAttemptId_;
}
public static final int KEYID_FIELD_NUMBER = 2;
private int keyId_ = -1;
/**
* <code>optional int32 keyId = 2 [default = -1];</code>
* @return Whether the keyId field is set.
*/
@java.lang.Override
public boolean hasKeyId() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* <code>optional int32 keyId = 2 [default = -1];</code>
* @return The keyId.
*/
@java.lang.Override
public int getKeyId() {
return keyId_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getAppAttemptId());
}
if (((bitField0_ & 0x00000002) != 0)) {
output.writeInt32(2, keyId_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(1, getAppAttemptId());
}
if (((bitField0_ & 0x00000002) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeInt32Size(2, keyId_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.AMRMTokenIdentifierProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.AMRMTokenIdentifierProto other = (org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.AMRMTokenIdentifierProto) obj;
if (hasAppAttemptId() != other.hasAppAttemptId()) return false;
if (hasAppAttemptId()) {
if (!getAppAttemptId()
.equals(other.getAppAttemptId())) return false;
}
if (hasKeyId() != other.hasKeyId()) return false;
if (hasKeyId()) {
if (getKeyId()
!= other.getKeyId()) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasAppAttemptId()) {
hash = (37 * hash) + APPATTEMPTID_FIELD_NUMBER;
hash = (53 * hash) + getAppAttemptId().hashCode();
}
if (hasKeyId()) {
hash = (37 * hash) + KEYID_FIELD_NUMBER;
hash = (53 * hash) + getKeyId();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.AMRMTokenIdentifierProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.AMRMTokenIdentifierProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.AMRMTokenIdentifierProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.AMRMTokenIdentifierProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.AMRMTokenIdentifierProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.AMRMTokenIdentifierProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.AMRMTokenIdentifierProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.AMRMTokenIdentifierProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.AMRMTokenIdentifierProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.AMRMTokenIdentifierProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.AMRMTokenIdentifierProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.AMRMTokenIdentifierProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.AMRMTokenIdentifierProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.AMRMTokenIdentifierProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.AMRMTokenIdentifierProto)
org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.AMRMTokenIdentifierProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.internal_static_hadoop_yarn_AMRMTokenIdentifierProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.internal_static_hadoop_yarn_AMRMTokenIdentifierProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.AMRMTokenIdentifierProto.class, org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.AMRMTokenIdentifierProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.AMRMTokenIdentifierProto.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getAppAttemptIdFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
appAttemptId_ = null;
if (appAttemptIdBuilder_ != null) {
appAttemptIdBuilder_.dispose();
appAttemptIdBuilder_ = null;
}
keyId_ = -1;
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.internal_static_hadoop_yarn_AMRMTokenIdentifierProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.AMRMTokenIdentifierProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.AMRMTokenIdentifierProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.AMRMTokenIdentifierProto build() {
org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.AMRMTokenIdentifierProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.AMRMTokenIdentifierProto buildPartial() {
org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.AMRMTokenIdentifierProto result = new org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.AMRMTokenIdentifierProto(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartial0(org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.AMRMTokenIdentifierProto result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.appAttemptId_ = appAttemptIdBuilder_ == null
? appAttemptId_
: appAttemptIdBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.keyId_ = keyId_;
to_bitField0_ |= 0x00000002;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.AMRMTokenIdentifierProto) {
return mergeFrom((org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.AMRMTokenIdentifierProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.AMRMTokenIdentifierProto other) {
if (other == org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.AMRMTokenIdentifierProto.getDefaultInstance()) return this;
if (other.hasAppAttemptId()) {
mergeAppAttemptId(other.getAppAttemptId());
}
if (other.hasKeyId()) {
setKeyId(other.getKeyId());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
input.readMessage(
getAppAttemptIdFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
case 16: {
keyId_ = input.readInt32();
bitField0_ |= 0x00000002;
break;
} // case 16
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto appAttemptId_;
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder> appAttemptIdBuilder_;
/**
* <code>optional .hadoop.yarn.ApplicationAttemptIdProto appAttemptId = 1;</code>
* @return Whether the appAttemptId field is set.
*/
public boolean hasAppAttemptId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* <code>optional .hadoop.yarn.ApplicationAttemptIdProto appAttemptId = 1;</code>
* @return The appAttemptId.
*/
public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto getAppAttemptId() {
if (appAttemptIdBuilder_ == null) {
return appAttemptId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.getDefaultInstance() : appAttemptId_;
} else {
return appAttemptIdBuilder_.getMessage();
}
}
/**
* <code>optional .hadoop.yarn.ApplicationAttemptIdProto appAttemptId = 1;</code>
*/
public Builder setAppAttemptId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto value) {
if (appAttemptIdBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
appAttemptId_ = value;
} else {
appAttemptIdBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* <code>optional .hadoop.yarn.ApplicationAttemptIdProto appAttemptId = 1;</code>
*/
public Builder setAppAttemptId(
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder builderForValue) {
if (appAttemptIdBuilder_ == null) {
appAttemptId_ = builderForValue.build();
} else {
appAttemptIdBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* <code>optional .hadoop.yarn.ApplicationAttemptIdProto appAttemptId = 1;</code>
*/
public Builder mergeAppAttemptId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto value) {
if (appAttemptIdBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0) &&
appAttemptId_ != null &&
appAttemptId_ != org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.getDefaultInstance()) {
getAppAttemptIdBuilder().mergeFrom(value);
} else {
appAttemptId_ = value;
}
} else {
appAttemptIdBuilder_.mergeFrom(value);
}
if (appAttemptId_ != null) {
bitField0_ |= 0x00000001;
onChanged();
}
return this;
}
/**
* <code>optional .hadoop.yarn.ApplicationAttemptIdProto appAttemptId = 1;</code>
*/
public Builder clearAppAttemptId() {
bitField0_ = (bitField0_ & ~0x00000001);
appAttemptId_ = null;
if (appAttemptIdBuilder_ != null) {
appAttemptIdBuilder_.dispose();
appAttemptIdBuilder_ = null;
}
onChanged();
return this;
}
/**
* <code>optional .hadoop.yarn.ApplicationAttemptIdProto appAttemptId = 1;</code>
*/
public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder getAppAttemptIdBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getAppAttemptIdFieldBuilder().getBuilder();
}
/**
* <code>optional .hadoop.yarn.ApplicationAttemptIdProto appAttemptId = 1;</code>
*/
public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder getAppAttemptIdOrBuilder() {
if (appAttemptIdBuilder_ != null) {
return appAttemptIdBuilder_.getMessageOrBuilder();
} else {
return appAttemptId_ == null ?
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.getDefaultInstance() : appAttemptId_;
}
}
/**
* <code>optional .hadoop.yarn.ApplicationAttemptIdProto appAttemptId = 1;</code>
*/
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder>
getAppAttemptIdFieldBuilder() {
if (appAttemptIdBuilder_ == null) {
appAttemptIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder>(
getAppAttemptId(),
getParentForChildren(),
isClean());
appAttemptId_ = null;
}
return appAttemptIdBuilder_;
}
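// --- Editorial usage sketch (not protoc output) -----------------------------
// A minimal sketch of the two ways to populate the appAttemptId sub-message.
// `builder` is an assumed AMRMTokenIdentifierProto.Builder, and `attemptId` an
// assumed pre-built ApplicationAttemptIdProto; neither is defined in this file,
// and the nested setter name below is an assumption from yarn_protos.proto.
//
//   // (a) set a fully built sub-message:
//   builder.setAppAttemptId(attemptId);
//   // (b) or edit the nested message in place through the lazily created
//   //     SingleFieldBuilderV3-backed builder:
//   builder.getAppAttemptIdBuilder().setAttemptId(1);
//
// Note that getAppAttemptIdBuilder() marks the field as set (bit 0x00000001)
// even before anything is written to it, so prefer (a) when a finished value
// already exists.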
private int keyId_ = -1;
/**
* <code>optional int32 keyId = 2 [default = -1];</code>
* @return Whether the keyId field is set.
*/
@java.lang.Override
public boolean hasKeyId() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* <code>optional int32 keyId = 2 [default = -1];</code>
* @return The keyId.
*/
@java.lang.Override
public int getKeyId() {
return keyId_;
}
/**
* <code>optional int32 keyId = 2 [default = -1];</code>
* @param value The keyId to set.
* @return This builder for chaining.
*/
public Builder setKeyId(int value) {
keyId_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
* <code>optional int32 keyId = 2 [default = -1];</code>
* @return This builder for chaining.
*/
public Builder clearKeyId() {
bitField0_ = (bitField0_ & ~0x00000002);
keyId_ = -1;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.AMRMTokenIdentifierProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.AMRMTokenIdentifierProto)
private static final org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.AMRMTokenIdentifierProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.AMRMTokenIdentifierProto();
}
public static org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.AMRMTokenIdentifierProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<AMRMTokenIdentifierProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<AMRMTokenIdentifierProto>() {
@java.lang.Override
public AMRMTokenIdentifierProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<AMRMTokenIdentifierProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<AMRMTokenIdentifierProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.AMRMTokenIdentifierProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
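// --- Editorial usage sketch (not protoc output) -----------------------------
// A build/serialize/parse round trip for AMRMTokenIdentifierProto, assuming an
// illustrative ApplicationAttemptIdProto named `attemptId`; the keyId value is
// arbitrary.
//
//   AMRMTokenIdentifierProto token = AMRMTokenIdentifierProto.newBuilder()
//       .setAppAttemptId(attemptId)
//       .setKeyId(7)                 // optional int32, default -1 when unset
//       .build();
//   byte[] wire = token.toByteArray();
//   try {
//     AMRMTokenIdentifierProto parsed = AMRMTokenIdentifierProto.parseFrom(wire);
//     assert parsed.equals(token);   // field-wise equality, see equals() above
//   } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
//     // thrown when the bytes are not a valid AMRMTokenIdentifierProto
//   }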
public interface ContainerTokenIdentifierProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.ContainerTokenIdentifierProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* <code>optional .hadoop.yarn.ContainerIdProto containerId = 1;</code>
* @return Whether the containerId field is set.
*/
boolean hasContainerId();
/**
* <code>optional .hadoop.yarn.ContainerIdProto containerId = 1;</code>
* @return The containerId.
*/
org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getContainerId();
/**
* <code>optional .hadoop.yarn.ContainerIdProto containerId = 1;</code>
*/
org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getContainerIdOrBuilder();
/**
* <code>optional string nmHostAddr = 2;</code>
* @return Whether the nmHostAddr field is set.
*/
boolean hasNmHostAddr();
/**
* <code>optional string nmHostAddr = 2;</code>
* @return The nmHostAddr.
*/
java.lang.String getNmHostAddr();
/**
* <code>optional string nmHostAddr = 2;</code>
* @return The bytes for nmHostAddr.
*/
org.apache.hadoop.thirdparty.protobuf.ByteString
getNmHostAddrBytes();
/**
* <code>optional string appSubmitter = 3;</code>
* @return Whether the appSubmitter field is set.
*/
boolean hasAppSubmitter();
/**
* <code>optional string appSubmitter = 3;</code>
* @return The appSubmitter.
*/
java.lang.String getAppSubmitter();
/**
* <code>optional string appSubmitter = 3;</code>
* @return The bytes for appSubmitter.
*/
org.apache.hadoop.thirdparty.protobuf.ByteString
getAppSubmitterBytes();
/**
* <code>optional .hadoop.yarn.ResourceProto resource = 4;</code>
* @return Whether the resource field is set.
*/
boolean hasResource();
/**
* <code>optional .hadoop.yarn.ResourceProto resource = 4;</code>
* @return The resource.
*/
org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getResource();
/**
* <code>optional .hadoop.yarn.ResourceProto resource = 4;</code>
*/
org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getResourceOrBuilder();
/**
* <code>optional int64 expiryTimeStamp = 5;</code>
* @return Whether the expiryTimeStamp field is set.
*/
boolean hasExpiryTimeStamp();
/**
* <code>optional int64 expiryTimeStamp = 5;</code>
* @return The expiryTimeStamp.
*/
long getExpiryTimeStamp();
/**
* <code>optional int32 masterKeyId = 6 [default = -1];</code>
* @return Whether the masterKeyId field is set.
*/
boolean hasMasterKeyId();
/**
* <code>optional int32 masterKeyId = 6 [default = -1];</code>
* @return The masterKeyId.
*/
int getMasterKeyId();
/**
* <code>optional int64 rmIdentifier = 7;</code>
* @return Whether the rmIdentifier field is set.
*/
boolean hasRmIdentifier();
/**
* <code>optional int64 rmIdentifier = 7;</code>
* @return The rmIdentifier.
*/
long getRmIdentifier();
/**
* <code>optional .hadoop.yarn.PriorityProto priority = 8;</code>
* @return Whether the priority field is set.
*/
boolean hasPriority();
/**
* <code>optional .hadoop.yarn.PriorityProto priority = 8;</code>
* @return The priority.
*/
org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto getPriority();
/**
* <code>optional .hadoop.yarn.PriorityProto priority = 8;</code>
*/
org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder getPriorityOrBuilder();
/**
* <code>optional int64 creationTime = 9;</code>
* @return Whether the creationTime field is set.
*/
boolean hasCreationTime();
/**
* <code>optional int64 creationTime = 9;</code>
* @return The creationTime.
*/
long getCreationTime();
/**
* <code>optional .hadoop.yarn.LogAggregationContextProto logAggregationContext = 10;</code>
* @return Whether the logAggregationContext field is set.
*/
boolean hasLogAggregationContext();
/**
* <code>optional .hadoop.yarn.LogAggregationContextProto logAggregationContext = 10;</code>
* @return The logAggregationContext.
*/
org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto getLogAggregationContext();
/**
* <code>optional .hadoop.yarn.LogAggregationContextProto logAggregationContext = 10;</code>
*/
org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProtoOrBuilder getLogAggregationContextOrBuilder();
/**
* <code>optional string nodeLabelExpression = 11;</code>
* @return Whether the nodeLabelExpression field is set.
*/
boolean hasNodeLabelExpression();
/**
* <code>optional string nodeLabelExpression = 11;</code>
* @return The nodeLabelExpression.
*/
java.lang.String getNodeLabelExpression();
/**
* <code>optional string nodeLabelExpression = 11;</code>
* @return The bytes for nodeLabelExpression.
*/
org.apache.hadoop.thirdparty.protobuf.ByteString
getNodeLabelExpressionBytes();
/**
* <code>optional .hadoop.yarn.ContainerTypeProto containerType = 12;</code>
* @return Whether the containerType field is set.
*/
boolean hasContainerType();
/**
* <code>optional .hadoop.yarn.ContainerTypeProto containerType = 12;</code>
* @return The containerType.
*/
org.apache.hadoop.yarn.proto.YarnProtos.ContainerTypeProto getContainerType();
/**
* <code>optional .hadoop.yarn.ExecutionTypeProto executionType = 13 [default = GUARANTEED];</code>
* @return Whether the executionType field is set.
*/
boolean hasExecutionType();
/**
* <code>optional .hadoop.yarn.ExecutionTypeProto executionType = 13 [default = GUARANTEED];</code>
* @return The executionType.
*/
org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto getExecutionType();
/**
* <code>optional int32 version = 14 [default = 0];</code>
* @return Whether the version field is set.
*/
boolean hasVersion();
/**
* <code>optional int32 version = 14 [default = 0];</code>
* @return The version.
*/
int getVersion();
/**
* <code>optional int64 allocation_request_id = 15 [default = -1];</code>
* @return Whether the allocationRequestId field is set.
*/
boolean hasAllocationRequestId();
/**
* <code>optional int64 allocation_request_id = 15 [default = -1];</code>
* @return The allocationRequestId.
*/
long getAllocationRequestId();
/**
* <code>repeated string allocation_tags = 16;</code>
* @return A list containing the allocationTags.
*/
java.util.List<java.lang.String>
getAllocationTagsList();
/**
* <code>repeated string allocation_tags = 16;</code>
* @return The count of allocationTags.
*/
int getAllocationTagsCount();
/**
* <code>repeated string allocation_tags = 16;</code>
* @param index The index of the element to return.
* @return The allocationTags at the given index.
*/
java.lang.String getAllocationTags(int index);
/**
* <code>repeated string allocation_tags = 16;</code>
* @param index The index of the value to return.
* @return The bytes of the allocationTags at the given index.
*/
org.apache.hadoop.thirdparty.protobuf.ByteString
getAllocationTagsBytes(int index);
}
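// --- Editorial usage sketch (not protoc output) -----------------------------
// Every field in the interface above is proto2 `optional`, so callers should
// gate reads on hasX(): getX() on an unset field silently returns the default
// (e.g. masterKeyId -> -1, executionType -> GUARANTEED). `proto` is an assumed
// ContainerTokenIdentifierProtoOrBuilder instance.
//
//   long expiry = proto.hasExpiryTimeStamp() ? proto.getExpiryTimeStamp() : -1L;
//   if (proto.hasResource()) {
//     org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto res = proto.getResource();
//     // ... use res ...
//   }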
/**
* Protobuf type {@code hadoop.yarn.ContainerTokenIdentifierProto}
*/
public static final class ContainerTokenIdentifierProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.ContainerTokenIdentifierProto)
ContainerTokenIdentifierProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use ContainerTokenIdentifierProto.newBuilder() to construct.
private ContainerTokenIdentifierProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ContainerTokenIdentifierProto() {
nmHostAddr_ = "";
appSubmitter_ = "";
masterKeyId_ = -1;
nodeLabelExpression_ = "";
containerType_ = 1;
executionType_ = 1;
allocationRequestId_ = -1L;
allocationTags_ =
org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new ContainerTokenIdentifierProto();
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.internal_static_hadoop_yarn_ContainerTokenIdentifierProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.internal_static_hadoop_yarn_ContainerTokenIdentifierProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.ContainerTokenIdentifierProto.class, org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.ContainerTokenIdentifierProto.Builder.class);
}
private int bitField0_;
public static final int CONTAINERID_FIELD_NUMBER = 1;
private org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto containerId_;
/**
* <code>optional .hadoop.yarn.ContainerIdProto containerId = 1;</code>
* @return Whether the containerId field is set.
*/
@java.lang.Override
public boolean hasContainerId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* <code>optional .hadoop.yarn.ContainerIdProto containerId = 1;</code>
* @return The containerId.
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getContainerId() {
return containerId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : containerId_;
}
/**
* <code>optional .hadoop.yarn.ContainerIdProto containerId = 1;</code>
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getContainerIdOrBuilder() {
return containerId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : containerId_;
}
public static final int NMHOSTADDR_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object nmHostAddr_ = "";
/**
* <code>optional string nmHostAddr = 2;</code>
* @return Whether the nmHostAddr field is set.
*/
@java.lang.Override
public boolean hasNmHostAddr() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* <code>optional string nmHostAddr = 2;</code>
* @return The nmHostAddr.
*/
@java.lang.Override
public java.lang.String getNmHostAddr() {
java.lang.Object ref = nmHostAddr_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
nmHostAddr_ = s;
}
return s;
}
}
/**
* <code>optional string nmHostAddr = 2;</code>
* @return The bytes for nmHostAddr.
*/
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.ByteString
getNmHostAddrBytes() {
java.lang.Object ref = nmHostAddr_;
if (ref instanceof java.lang.String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
nmHostAddr_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
public static final int APPSUBMITTER_FIELD_NUMBER = 3;
@SuppressWarnings("serial")
private volatile java.lang.Object appSubmitter_ = "";
/**
* <code>optional string appSubmitter = 3;</code>
* @return Whether the appSubmitter field is set.
*/
@java.lang.Override
public boolean hasAppSubmitter() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
* <code>optional string appSubmitter = 3;</code>
* @return The appSubmitter.
*/
@java.lang.Override
public java.lang.String getAppSubmitter() {
java.lang.Object ref = appSubmitter_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
appSubmitter_ = s;
}
return s;
}
}
/**
* <code>optional string appSubmitter = 3;</code>
* @return The bytes for appSubmitter.
*/
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.ByteString
getAppSubmitterBytes() {
java.lang.Object ref = appSubmitter_;
if (ref instanceof java.lang.String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
appSubmitter_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
public static final int RESOURCE_FIELD_NUMBER = 4;
private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto resource_;
/**
* <code>optional .hadoop.yarn.ResourceProto resource = 4;</code>
* @return Whether the resource field is set.
*/
@java.lang.Override
public boolean hasResource() {
return ((bitField0_ & 0x00000008) != 0);
}
/**
* <code>optional .hadoop.yarn.ResourceProto resource = 4;</code>
* @return The resource.
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getResource() {
return resource_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : resource_;
}
/**
* <code>optional .hadoop.yarn.ResourceProto resource = 4;</code>
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getResourceOrBuilder() {
return resource_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : resource_;
}
public static final int EXPIRYTIMESTAMP_FIELD_NUMBER = 5;
private long expiryTimeStamp_ = 0L;
/**
* <code>optional int64 expiryTimeStamp = 5;</code>
* @return Whether the expiryTimeStamp field is set.
*/
@java.lang.Override
public boolean hasExpiryTimeStamp() {
return ((bitField0_ & 0x00000010) != 0);
}
/**
* <code>optional int64 expiryTimeStamp = 5;</code>
* @return The expiryTimeStamp.
*/
@java.lang.Override
public long getExpiryTimeStamp() {
return expiryTimeStamp_;
}
public static final int MASTERKEYID_FIELD_NUMBER = 6;
private int masterKeyId_ = -1;
/**
* <code>optional int32 masterKeyId = 6 [default = -1];</code>
* @return Whether the masterKeyId field is set.
*/
@java.lang.Override
public boolean hasMasterKeyId() {
return ((bitField0_ & 0x00000020) != 0);
}
/**
* <code>optional int32 masterKeyId = 6 [default = -1];</code>
* @return The masterKeyId.
*/
@java.lang.Override
public int getMasterKeyId() {
return masterKeyId_;
}
public static final int RMIDENTIFIER_FIELD_NUMBER = 7;
private long rmIdentifier_ = 0L;
/**
* <code>optional int64 rmIdentifier = 7;</code>
* @return Whether the rmIdentifier field is set.
*/
@java.lang.Override
public boolean hasRmIdentifier() {
return ((bitField0_ & 0x00000040) != 0);
}
/**
* <code>optional int64 rmIdentifier = 7;</code>
* @return The rmIdentifier.
*/
@java.lang.Override
public long getRmIdentifier() {
return rmIdentifier_;
}
public static final int PRIORITY_FIELD_NUMBER = 8;
private org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto priority_;
/**
* <code>optional .hadoop.yarn.PriorityProto priority = 8;</code>
* @return Whether the priority field is set.
*/
@java.lang.Override
public boolean hasPriority() {
return ((bitField0_ & 0x00000080) != 0);
}
/**
* <code>optional .hadoop.yarn.PriorityProto priority = 8;</code>
* @return The priority.
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto getPriority() {
return priority_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance() : priority_;
}
/**
* <code>optional .hadoop.yarn.PriorityProto priority = 8;</code>
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder getPriorityOrBuilder() {
return priority_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance() : priority_;
}
public static final int CREATIONTIME_FIELD_NUMBER = 9;
private long creationTime_ = 0L;
/**
* <code>optional int64 creationTime = 9;</code>
* @return Whether the creationTime field is set.
*/
@java.lang.Override
public boolean hasCreationTime() {
return ((bitField0_ & 0x00000100) != 0);
}
/**
* <code>optional int64 creationTime = 9;</code>
* @return The creationTime.
*/
@java.lang.Override
public long getCreationTime() {
return creationTime_;
}
public static final int LOGAGGREGATIONCONTEXT_FIELD_NUMBER = 10;
private org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto logAggregationContext_;
/**
* <code>optional .hadoop.yarn.LogAggregationContextProto logAggregationContext = 10;</code>
* @return Whether the logAggregationContext field is set.
*/
@java.lang.Override
public boolean hasLogAggregationContext() {
return ((bitField0_ & 0x00000200) != 0);
}
/**
* <code>optional .hadoop.yarn.LogAggregationContextProto logAggregationContext = 10;</code>
* @return The logAggregationContext.
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto getLogAggregationContext() {
return logAggregationContext_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto.getDefaultInstance() : logAggregationContext_;
}
/**
* <code>optional .hadoop.yarn.LogAggregationContextProto logAggregationContext = 10;</code>
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProtoOrBuilder getLogAggregationContextOrBuilder() {
return logAggregationContext_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto.getDefaultInstance() : logAggregationContext_;
}
public static final int NODELABELEXPRESSION_FIELD_NUMBER = 11;
@SuppressWarnings("serial")
private volatile java.lang.Object nodeLabelExpression_ = "";
/**
* <code>optional string nodeLabelExpression = 11;</code>
* @return Whether the nodeLabelExpression field is set.
*/
@java.lang.Override
public boolean hasNodeLabelExpression() {
return ((bitField0_ & 0x00000400) != 0);
}
/**
* <code>optional string nodeLabelExpression = 11;</code>
* @return The nodeLabelExpression.
*/
@java.lang.Override
public java.lang.String getNodeLabelExpression() {
java.lang.Object ref = nodeLabelExpression_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
nodeLabelExpression_ = s;
}
return s;
}
}
/**
* <code>optional string nodeLabelExpression = 11;</code>
* @return The bytes for nodeLabelExpression.
*/
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.ByteString
getNodeLabelExpressionBytes() {
java.lang.Object ref = nodeLabelExpression_;
if (ref instanceof java.lang.String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
nodeLabelExpression_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
public static final int CONTAINERTYPE_FIELD_NUMBER = 12;
private int containerType_ = 1;
/**
* <code>optional .hadoop.yarn.ContainerTypeProto containerType = 12;</code>
* @return Whether the containerType field is set.
*/
@java.lang.Override public boolean hasContainerType() {
return ((bitField0_ & 0x00000800) != 0);
}
/**
* <code>optional .hadoop.yarn.ContainerTypeProto containerType = 12;</code>
* @return The containerType.
*/
@java.lang.Override public org.apache.hadoop.yarn.proto.YarnProtos.ContainerTypeProto getContainerType() {
org.apache.hadoop.yarn.proto.YarnProtos.ContainerTypeProto result = org.apache.hadoop.yarn.proto.YarnProtos.ContainerTypeProto.forNumber(containerType_);
return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerTypeProto.APPLICATION_MASTER : result;
}
public static final int EXECUTIONTYPE_FIELD_NUMBER = 13;
private int executionType_ = 1;
/**
* <code>optional .hadoop.yarn.ExecutionTypeProto executionType = 13 [default = GUARANTEED];</code>
* @return Whether the executionType field is set.
*/
@java.lang.Override public boolean hasExecutionType() {
return ((bitField0_ & 0x00001000) != 0);
}
/**
* <code>optional .hadoop.yarn.ExecutionTypeProto executionType = 13 [default = GUARANTEED];</code>
* @return The executionType.
*/
@java.lang.Override public org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto getExecutionType() {
org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto result = org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto.forNumber(executionType_);
return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto.GUARANTEED : result;
}
public static final int VERSION_FIELD_NUMBER = 14;
private int version_ = 0;
/**
* <code>optional int32 version = 14 [default = 0];</code>
* @return Whether the version field is set.
*/
@java.lang.Override
public boolean hasVersion() {
return ((bitField0_ & 0x00002000) != 0);
}
/**
* <code>optional int32 version = 14 [default = 0];</code>
* @return The version.
*/
@java.lang.Override
public int getVersion() {
return version_;
}
public static final int ALLOCATION_REQUEST_ID_FIELD_NUMBER = 15;
private long allocationRequestId_ = -1L;
/**
* <code>optional int64 allocation_request_id = 15 [default = -1];</code>
* @return Whether the allocationRequestId field is set.
*/
@java.lang.Override
public boolean hasAllocationRequestId() {
return ((bitField0_ & 0x00004000) != 0);
}
/**
* <code>optional int64 allocation_request_id = 15 [default = -1];</code>
* @return The allocationRequestId.
*/
@java.lang.Override
public long getAllocationRequestId() {
return allocationRequestId_;
}
public static final int ALLOCATION_TAGS_FIELD_NUMBER = 16;
@SuppressWarnings("serial")
private org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList allocationTags_ =
org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
/**
* <code>repeated string allocation_tags = 16;</code>
* @return A list containing the allocationTags.
*/
public org.apache.hadoop.thirdparty.protobuf.ProtocolStringList
getAllocationTagsList() {
return allocationTags_;
}
/**
* <code>repeated string allocation_tags = 16;</code>
* @return The count of allocationTags.
*/
public int getAllocationTagsCount() {
return allocationTags_.size();
}
/**
* <code>repeated string allocation_tags = 16;</code>
* @param index The index of the element to return.
* @return The allocationTags at the given index.
*/
public java.lang.String getAllocationTags(int index) {
return allocationTags_.get(index);
}
/**
* <code>repeated string allocation_tags = 16;</code>
* @param index The index of the value to return.
* @return The bytes of the allocationTags at the given index.
*/
public org.apache.hadoop.thirdparty.protobuf.ByteString
getAllocationTagsBytes(int index) {
return allocationTags_.getByteString(index);
}
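// --- Editorial usage sketch (not protoc output) -----------------------------
// allocation_tags is a repeated string, so it exposes list/count/index
// accessors instead of a hasX() check. `token` is an assumed
// ContainerTokenIdentifierProto instance.
//
//   for (int i = 0; i < token.getAllocationTagsCount(); i++) {
//     System.out.println(token.getAllocationTags(i));
//   }
//   // or iterate the ProtocolStringList directly:
//   for (String tag : token.getAllocationTagsList()) { /* ... */ }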
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
if (hasResource()) {
if (!getResource().isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getContainerId());
}
if (((bitField0_ & 0x00000002) != 0)) {
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 2, nmHostAddr_);
}
if (((bitField0_ & 0x00000004) != 0)) {
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 3, appSubmitter_);
}
if (((bitField0_ & 0x00000008) != 0)) {
output.writeMessage(4, getResource());
}
if (((bitField0_ & 0x00000010) != 0)) {
output.writeInt64(5, expiryTimeStamp_);
}
if (((bitField0_ & 0x00000020) != 0)) {
output.writeInt32(6, masterKeyId_);
}
if (((bitField0_ & 0x00000040) != 0)) {
output.writeInt64(7, rmIdentifier_);
}
if (((bitField0_ & 0x00000080) != 0)) {
output.writeMessage(8, getPriority());
}
if (((bitField0_ & 0x00000100) != 0)) {
output.writeInt64(9, creationTime_);
}
if (((bitField0_ & 0x00000200) != 0)) {
output.writeMessage(10, getLogAggregationContext());
}
if (((bitField0_ & 0x00000400) != 0)) {
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 11, nodeLabelExpression_);
}
if (((bitField0_ & 0x00000800) != 0)) {
output.writeEnum(12, containerType_);
}
if (((bitField0_ & 0x00001000) != 0)) {
output.writeEnum(13, executionType_);
}
if (((bitField0_ & 0x00002000) != 0)) {
output.writeInt32(14, version_);
}
if (((bitField0_ & 0x00004000) != 0)) {
output.writeInt64(15, allocationRequestId_);
}
for (int i = 0; i < allocationTags_.size(); i++) {
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 16, allocationTags_.getRaw(i));
}
getUnknownFields().writeTo(output);
}
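// --- Editorial usage sketch (not protoc output) -----------------------------
// writeTo() above emits the bare message with no length prefix, so it only
// round-trips when the message is the whole payload. To stream several
// messages over one connection, use the varint-length-delimited pair instead.
// `token`, `out`, and `in` are assumed; parseDelimitedFrom returns null at EOF.
//
//   token.writeDelimitedTo(out);                              // prefixed write
//   ContainerTokenIdentifierProto next =
//       ContainerTokenIdentifierProto.parseDelimitedFrom(in); // matching read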
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(1, getContainerId());
}
if (((bitField0_ & 0x00000002) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(2, nmHostAddr_);
}
if (((bitField0_ & 0x00000004) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(3, appSubmitter_);
}
if (((bitField0_ & 0x00000008) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(4, getResource());
}
if (((bitField0_ & 0x00000010) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeInt64Size(5, expiryTimeStamp_);
}
if (((bitField0_ & 0x00000020) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeInt32Size(6, masterKeyId_);
}
if (((bitField0_ & 0x00000040) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeInt64Size(7, rmIdentifier_);
}
if (((bitField0_ & 0x00000080) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(8, getPriority());
}
if (((bitField0_ & 0x00000100) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeInt64Size(9, creationTime_);
}
if (((bitField0_ & 0x00000200) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(10, getLogAggregationContext());
}
if (((bitField0_ & 0x00000400) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(11, nodeLabelExpression_);
}
if (((bitField0_ & 0x00000800) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeEnumSize(12, containerType_);
}
if (((bitField0_ & 0x00001000) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeEnumSize(13, executionType_);
}
if (((bitField0_ & 0x00002000) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeInt32Size(14, version_);
}
if (((bitField0_ & 0x00004000) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeInt64Size(15, allocationRequestId_);
}
{
int dataSize = 0;
for (int i = 0; i < allocationTags_.size(); i++) {
dataSize += computeStringSizeNoTag(allocationTags_.getRaw(i));
}
size += dataSize;
size += 2 * getAllocationTagsList().size();
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
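// --- Editorial usage sketch (not protoc output) -----------------------------
// getSerializedSize() is memoized in memoizedSize, so calling it ahead of
// writeTo() costs a single sizing pass and lets a caller allocate an exact
// buffer. `token` is an assumed message instance.
//
//   byte[] buf = new byte[token.getSerializedSize()];
//   org.apache.hadoop.thirdparty.protobuf.CodedOutputStream cos =
//       org.apache.hadoop.thirdparty.protobuf.CodedOutputStream.newInstance(buf);
//   token.writeTo(cos);
//   cos.checkNoSpaceLeft();  // verifies the size computation matched the write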
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.ContainerTokenIdentifierProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.ContainerTokenIdentifierProto other = (org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.ContainerTokenIdentifierProto) obj;
if (hasContainerId() != other.hasContainerId()) return false;
if (hasContainerId()) {
if (!getContainerId()
.equals(other.getContainerId())) return false;
}
if (hasNmHostAddr() != other.hasNmHostAddr()) return false;
if (hasNmHostAddr()) {
if (!getNmHostAddr()
.equals(other.getNmHostAddr())) return false;
}
if (hasAppSubmitter() != other.hasAppSubmitter()) return false;
if (hasAppSubmitter()) {
if (!getAppSubmitter()
.equals(other.getAppSubmitter())) return false;
}
if (hasResource() != other.hasResource()) return false;
if (hasResource()) {
if (!getResource()
.equals(other.getResource())) return false;
}
if (hasExpiryTimeStamp() != other.hasExpiryTimeStamp()) return false;
if (hasExpiryTimeStamp()) {
if (getExpiryTimeStamp()
!= other.getExpiryTimeStamp()) return false;
}
if (hasMasterKeyId() != other.hasMasterKeyId()) return false;
if (hasMasterKeyId()) {
if (getMasterKeyId()
!= other.getMasterKeyId()) return false;
}
if (hasRmIdentifier() != other.hasRmIdentifier()) return false;
if (hasRmIdentifier()) {
if (getRmIdentifier()
!= other.getRmIdentifier()) return false;
}
if (hasPriority() != other.hasPriority()) return false;
if (hasPriority()) {
if (!getPriority()
.equals(other.getPriority())) return false;
}
if (hasCreationTime() != other.hasCreationTime()) return false;
if (hasCreationTime()) {
if (getCreationTime()
!= other.getCreationTime()) return false;
}
if (hasLogAggregationContext() != other.hasLogAggregationContext()) return false;
if (hasLogAggregationContext()) {
if (!getLogAggregationContext()
.equals(other.getLogAggregationContext())) return false;
}
if (hasNodeLabelExpression() != other.hasNodeLabelExpression()) return false;
if (hasNodeLabelExpression()) {
if (!getNodeLabelExpression()
.equals(other.getNodeLabelExpression())) return false;
}
if (hasContainerType() != other.hasContainerType()) return false;
if (hasContainerType()) {
if (containerType_ != other.containerType_) return false;
}
if (hasExecutionType() != other.hasExecutionType()) return false;
if (hasExecutionType()) {
if (executionType_ != other.executionType_) return false;
}
if (hasVersion() != other.hasVersion()) return false;
if (hasVersion()) {
if (getVersion()
!= other.getVersion()) return false;
}
if (hasAllocationRequestId() != other.hasAllocationRequestId()) return false;
if (hasAllocationRequestId()) {
if (getAllocationRequestId()
!= other.getAllocationRequestId()) return false;
}
if (!getAllocationTagsList()
.equals(other.getAllocationTagsList())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasContainerId()) {
hash = (37 * hash) + CONTAINERID_FIELD_NUMBER;
hash = (53 * hash) + getContainerId().hashCode();
}
if (hasNmHostAddr()) {
hash = (37 * hash) + NMHOSTADDR_FIELD_NUMBER;
hash = (53 * hash) + getNmHostAddr().hashCode();
}
if (hasAppSubmitter()) {
hash = (37 * hash) + APPSUBMITTER_FIELD_NUMBER;
hash = (53 * hash) + getAppSubmitter().hashCode();
}
if (hasResource()) {
hash = (37 * hash) + RESOURCE_FIELD_NUMBER;
hash = (53 * hash) + getResource().hashCode();
}
if (hasExpiryTimeStamp()) {
hash = (37 * hash) + EXPIRYTIMESTAMP_FIELD_NUMBER;
hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
getExpiryTimeStamp());
}
if (hasMasterKeyId()) {
hash = (37 * hash) + MASTERKEYID_FIELD_NUMBER;
hash = (53 * hash) + getMasterKeyId();
}
if (hasRmIdentifier()) {
hash = (37 * hash) + RMIDENTIFIER_FIELD_NUMBER;
hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
getRmIdentifier());
}
if (hasPriority()) {
hash = (37 * hash) + PRIORITY_FIELD_NUMBER;
hash = (53 * hash) + getPriority().hashCode();
}
if (hasCreationTime()) {
hash = (37 * hash) + CREATIONTIME_FIELD_NUMBER;
hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
getCreationTime());
}
if (hasLogAggregationContext()) {
hash = (37 * hash) + LOGAGGREGATIONCONTEXT_FIELD_NUMBER;
hash = (53 * hash) + getLogAggregationContext().hashCode();
}
if (hasNodeLabelExpression()) {
hash = (37 * hash) + NODELABELEXPRESSION_FIELD_NUMBER;
hash = (53 * hash) + getNodeLabelExpression().hashCode();
}
if (hasContainerType()) {
hash = (37 * hash) + CONTAINERTYPE_FIELD_NUMBER;
hash = (53 * hash) + containerType_;
}
if (hasExecutionType()) {
hash = (37 * hash) + EXECUTIONTYPE_FIELD_NUMBER;
hash = (53 * hash) + executionType_;
}
if (hasVersion()) {
hash = (37 * hash) + VERSION_FIELD_NUMBER;
hash = (53 * hash) + getVersion();
}
if (hasAllocationRequestId()) {
hash = (37 * hash) + ALLOCATION_REQUEST_ID_FIELD_NUMBER;
hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
getAllocationRequestId());
}
if (getAllocationTagsCount() > 0) {
hash = (37 * hash) + ALLOCATION_TAGS_FIELD_NUMBER;
hash = (53 * hash) + getAllocationTagsList().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.ContainerTokenIdentifierProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.ContainerTokenIdentifierProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.ContainerTokenIdentifierProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.ContainerTokenIdentifierProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.ContainerTokenIdentifierProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.ContainerTokenIdentifierProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.ContainerTokenIdentifierProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.ContainerTokenIdentifierProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.ContainerTokenIdentifierProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.ContainerTokenIdentifierProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.ContainerTokenIdentifierProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.ContainerTokenIdentifierProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
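// --- Editorial usage sketch (not protoc output) -----------------------------
// All parseFrom overloads above funnel into PARSER; the in-memory variants
// throw InvalidProtocolBufferException on malformed input, while the stream
// variants may also surface plain IOException. `wire` is an assumed byte[].
//
//   try {
//     ContainerTokenIdentifierProto t =
//         ContainerTokenIdentifierProto.parseFrom(wire);
//   } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
//     // malformed or truncated bytes; e.getUnfinishedMessage() may carry the
//     // partially parsed message set by the parser above
//   }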
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.ContainerTokenIdentifierProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.ContainerTokenIdentifierProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.ContainerTokenIdentifierProto)
org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.ContainerTokenIdentifierProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.internal_static_hadoop_yarn_ContainerTokenIdentifierProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.internal_static_hadoop_yarn_ContainerTokenIdentifierProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.ContainerTokenIdentifierProto.class, org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.ContainerTokenIdentifierProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.ContainerTokenIdentifierProto.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getContainerIdFieldBuilder();
getResourceFieldBuilder();
getPriorityFieldBuilder();
getLogAggregationContextFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
containerId_ = null;
if (containerIdBuilder_ != null) {
containerIdBuilder_.dispose();
containerIdBuilder_ = null;
}
nmHostAddr_ = "";
appSubmitter_ = "";
resource_ = null;
if (resourceBuilder_ != null) {
resourceBuilder_.dispose();
resourceBuilder_ = null;
}
expiryTimeStamp_ = 0L;
masterKeyId_ = -1;
rmIdentifier_ = 0L;
priority_ = null;
if (priorityBuilder_ != null) {
priorityBuilder_.dispose();
priorityBuilder_ = null;
}
creationTime_ = 0L;
logAggregationContext_ = null;
if (logAggregationContextBuilder_ != null) {
logAggregationContextBuilder_.dispose();
logAggregationContextBuilder_ = null;
}
nodeLabelExpression_ = "";
containerType_ = 1;
executionType_ = 1;
version_ = 0;
allocationRequestId_ = -1L;
allocationTags_ =
org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.internal_static_hadoop_yarn_ContainerTokenIdentifierProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.ContainerTokenIdentifierProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.ContainerTokenIdentifierProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.ContainerTokenIdentifierProto build() {
org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.ContainerTokenIdentifierProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.ContainerTokenIdentifierProto buildPartial() {
org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.ContainerTokenIdentifierProto result = new org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.ContainerTokenIdentifierProto(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartial0(org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.ContainerTokenIdentifierProto result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.containerId_ = containerIdBuilder_ == null
? containerId_
: containerIdBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.nmHostAddr_ = nmHostAddr_;
to_bitField0_ |= 0x00000002;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.appSubmitter_ = appSubmitter_;
to_bitField0_ |= 0x00000004;
}
if (((from_bitField0_ & 0x00000008) != 0)) {
result.resource_ = resourceBuilder_ == null
? resource_
: resourceBuilder_.build();
to_bitField0_ |= 0x00000008;
}
if (((from_bitField0_ & 0x00000010) != 0)) {
result.expiryTimeStamp_ = expiryTimeStamp_;
to_bitField0_ |= 0x00000010;
}
if (((from_bitField0_ & 0x00000020) != 0)) {
result.masterKeyId_ = masterKeyId_;
to_bitField0_ |= 0x00000020;
}
if (((from_bitField0_ & 0x00000040) != 0)) {
result.rmIdentifier_ = rmIdentifier_;
to_bitField0_ |= 0x00000040;
}
if (((from_bitField0_ & 0x00000080) != 0)) {
result.priority_ = priorityBuilder_ == null
? priority_
: priorityBuilder_.build();
to_bitField0_ |= 0x00000080;
}
if (((from_bitField0_ & 0x00000100) != 0)) {
result.creationTime_ = creationTime_;
to_bitField0_ |= 0x00000100;
}
if (((from_bitField0_ & 0x00000200) != 0)) {
result.logAggregationContext_ = logAggregationContextBuilder_ == null
? logAggregationContext_
: logAggregationContextBuilder_.build();
to_bitField0_ |= 0x00000200;
}
if (((from_bitField0_ & 0x00000400) != 0)) {
result.nodeLabelExpression_ = nodeLabelExpression_;
to_bitField0_ |= 0x00000400;
}
if (((from_bitField0_ & 0x00000800) != 0)) {
result.containerType_ = containerType_;
to_bitField0_ |= 0x00000800;
}
if (((from_bitField0_ & 0x00001000) != 0)) {
result.executionType_ = executionType_;
to_bitField0_ |= 0x00001000;
}
if (((from_bitField0_ & 0x00002000) != 0)) {
result.version_ = version_;
to_bitField0_ |= 0x00002000;
}
if (((from_bitField0_ & 0x00004000) != 0)) {
result.allocationRequestId_ = allocationRequestId_;
to_bitField0_ |= 0x00004000;
}
if (((from_bitField0_ & 0x00008000) != 0)) {
allocationTags_.makeImmutable();
result.allocationTags_ = allocationTags_;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.ContainerTokenIdentifierProto) {
return mergeFrom((org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.ContainerTokenIdentifierProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.ContainerTokenIdentifierProto other) {
if (other == org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.ContainerTokenIdentifierProto.getDefaultInstance()) return this;
if (other.hasContainerId()) {
mergeContainerId(other.getContainerId());
}
if (other.hasNmHostAddr()) {
nmHostAddr_ = other.nmHostAddr_;
bitField0_ |= 0x00000002;
onChanged();
}
if (other.hasAppSubmitter()) {
appSubmitter_ = other.appSubmitter_;
bitField0_ |= 0x00000004;
onChanged();
}
if (other.hasResource()) {
mergeResource(other.getResource());
}
if (other.hasExpiryTimeStamp()) {
setExpiryTimeStamp(other.getExpiryTimeStamp());
}
if (other.hasMasterKeyId()) {
setMasterKeyId(other.getMasterKeyId());
}
if (other.hasRmIdentifier()) {
setRmIdentifier(other.getRmIdentifier());
}
if (other.hasPriority()) {
mergePriority(other.getPriority());
}
if (other.hasCreationTime()) {
setCreationTime(other.getCreationTime());
}
if (other.hasLogAggregationContext()) {
mergeLogAggregationContext(other.getLogAggregationContext());
}
if (other.hasNodeLabelExpression()) {
nodeLabelExpression_ = other.nodeLabelExpression_;
bitField0_ |= 0x00000400;
onChanged();
}
if (other.hasContainerType()) {
setContainerType(other.getContainerType());
}
if (other.hasExecutionType()) {
setExecutionType(other.getExecutionType());
}
if (other.hasVersion()) {
setVersion(other.getVersion());
}
if (other.hasAllocationRequestId()) {
setAllocationRequestId(other.getAllocationRequestId());
}
if (!other.allocationTags_.isEmpty()) {
if (allocationTags_.isEmpty()) {
allocationTags_ = other.allocationTags_;
bitField0_ |= 0x00008000;
} else {
ensureAllocationTagsIsMutable();
allocationTags_.addAll(other.allocationTags_);
}
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
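// --- Editorial usage sketch (not protoc output) -----------------------------
// mergeFrom(other) above overwrites set scalar and string fields, recursively
// merges set sub-messages, and appends repeated fields (allocationTags uses
// addAll rather than replace). `base` and `overlay` are assumed
// ContainerTokenIdentifierProto instances.
//
//   ContainerTokenIdentifierProto merged =
//       ContainerTokenIdentifierProto.newBuilder(base)
//           .mergeFrom(overlay)
//           .build();  // overlay's set fields win; its tags are appended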
@java.lang.Override
public final boolean isInitialized() {
if (hasResource()) {
if (!getResource().isInitialized()) {
return false;
}
}
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
input.readMessage(
getContainerIdFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
case 18: {
nmHostAddr_ = input.readBytes();
bitField0_ |= 0x00000002;
break;
} // case 18
case 26: {
appSubmitter_ = input.readBytes();
bitField0_ |= 0x00000004;
break;
} // case 26
case 34: {
input.readMessage(
getResourceFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000008;
break;
} // case 34
case 40: {
expiryTimeStamp_ = input.readInt64();
bitField0_ |= 0x00000010;
break;
} // case 40
case 48: {
masterKeyId_ = input.readInt32();
bitField0_ |= 0x00000020;
break;
} // case 48
case 56: {
rmIdentifier_ = input.readInt64();
bitField0_ |= 0x00000040;
break;
} // case 56
case 66: {
input.readMessage(
getPriorityFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000080;
break;
} // case 66
case 72: {
creationTime_ = input.readInt64();
bitField0_ |= 0x00000100;
break;
} // case 72
case 82: {
input.readMessage(
getLogAggregationContextFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000200;
break;
} // case 82
case 90: {
nodeLabelExpression_ = input.readBytes();
bitField0_ |= 0x00000400;
break;
} // case 90
case 96: {
int tmpRaw = input.readEnum();
org.apache.hadoop.yarn.proto.YarnProtos.ContainerTypeProto tmpValue =
org.apache.hadoop.yarn.proto.YarnProtos.ContainerTypeProto.forNumber(tmpRaw);
if (tmpValue == null) {
mergeUnknownVarintField(12, tmpRaw);
} else {
containerType_ = tmpRaw;
bitField0_ |= 0x00000800;
}
break;
} // case 96
case 104: {
int tmpRaw = input.readEnum();
org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto tmpValue =
org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto.forNumber(tmpRaw);
if (tmpValue == null) {
mergeUnknownVarintField(13, tmpRaw);
} else {
executionType_ = tmpRaw;
bitField0_ |= 0x00001000;
}
break;
} // case 104
case 112: {
version_ = input.readInt32();
bitField0_ |= 0x00002000;
break;
} // case 112
case 120: {
allocationRequestId_ = input.readInt64();
bitField0_ |= 0x00004000;
break;
} // case 120
case 130: {
org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes();
ensureAllocationTagsIsMutable();
allocationTags_.add(bs);
break;
} // case 130
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
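// --- Editorial note (not protoc output) --------------------------------------
// The case labels in the switch above are precomputed wire tags,
// tag = (fieldNumber << 3) | wireType. Checking a few against this message:
//
//   containerId     : field  1, wire type 2 (len-delimited) -> (1  << 3) | 2 = 10
//   executionType   : field 13, wire type 0 (varint)        -> (13 << 3) | 0 = 104
//   allocation_tags : field 16, wire type 2 (len-delimited) -> (16 << 3) | 2 = 130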
private int bitField0_;
private org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto containerId_;
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> containerIdBuilder_;
/**
* <code>optional .hadoop.yarn.ContainerIdProto containerId = 1;</code>
* @return Whether the containerId field is set.
*/
public boolean hasContainerId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* <code>optional .hadoop.yarn.ContainerIdProto containerId = 1;</code>
* @return The containerId.
*/
public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getContainerId() {
if (containerIdBuilder_ == null) {
return containerId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : containerId_;
} else {
return containerIdBuilder_.getMessage();
}
}
/**
* <code>optional .hadoop.yarn.ContainerIdProto containerId = 1;</code>
*/
public Builder setContainerId(org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) {
if (containerIdBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
containerId_ = value;
} else {
containerIdBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* <code>optional .hadoop.yarn.ContainerIdProto containerId = 1;</code>
*/
public Builder setContainerId(
org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder builderForValue) {
if (containerIdBuilder_ == null) {
containerId_ = builderForValue.build();
} else {
containerIdBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* <code>optional .hadoop.yarn.ContainerIdProto containerId = 1;</code>
*/
public Builder mergeContainerId(org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) {
if (containerIdBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0) &&
containerId_ != null &&
containerId_ != org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance()) {
getContainerIdBuilder().mergeFrom(value);
} else {
containerId_ = value;
}
} else {
containerIdBuilder_.mergeFrom(value);
}
if (containerId_ != null) {
bitField0_ |= 0x00000001;
onChanged();
}
return this;
}
/**
* <code>optional .hadoop.yarn.ContainerIdProto containerId = 1;</code>
*/
public Builder clearContainerId() {
bitField0_ = (bitField0_ & ~0x00000001);
containerId_ = null;
if (containerIdBuilder_ != null) {
containerIdBuilder_.dispose();
containerIdBuilder_ = null;
}
onChanged();
return this;
}
/**
* <code>optional .hadoop.yarn.ContainerIdProto containerId = 1;</code>
*/
public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder getContainerIdBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getContainerIdFieldBuilder().getBuilder();
}
/**
* <code>optional .hadoop.yarn.ContainerIdProto containerId = 1;</code>
*/
public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getContainerIdOrBuilder() {
if (containerIdBuilder_ != null) {
return containerIdBuilder_.getMessageOrBuilder();
} else {
return containerId_ == null ?
org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : containerId_;
}
}
/**
* <code>optional .hadoop.yarn.ContainerIdProto containerId = 1;</code>
*/
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder>
getContainerIdFieldBuilder() {
if (containerIdBuilder_ == null) {
containerIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder>(
getContainerId(),
getParentForChildren(),
isClean());
containerId_ = null;
}
return containerIdBuilder_;
}
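// NOTE (editorial, not generated by protoc): the accessors above follow the
// SingleFieldBuilderV3 pattern. The message is held directly in containerId_
// until getContainerIdBuilder() is first called; at that point ownership moves
// to containerIdBuilder_ and containerId_ is nulled, which is why every getter
// checks the builder reference first. A minimal sketch, assuming the standard
// generated ContainerIdProto.Builder API (setId is not declared in this file):
//
//   ContainerTokenIdentifierProto.Builder b =
//       ContainerTokenIdentifierProto.newBuilder();
//   b.getContainerIdBuilder().setId(42L);  // promotes the field to a builder
//   ContainerTokenIdentifierProto msg = b.build();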
private java.lang.Object nmHostAddr_ = "";
/**
* <code>optional string nmHostAddr = 2;</code>
* @return Whether the nmHostAddr field is set.
*/
public boolean hasNmHostAddr() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* <code>optional string nmHostAddr = 2;</code>
* @return The nmHostAddr.
*/
public java.lang.String getNmHostAddr() {
java.lang.Object ref = nmHostAddr_;
if (!(ref instanceof java.lang.String)) {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
nmHostAddr_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <code>optional string nmHostAddr = 2;</code>
* @return The bytes for nmHostAddr.
*/
public org.apache.hadoop.thirdparty.protobuf.ByteString
getNmHostAddrBytes() {
java.lang.Object ref = nmHostAddr_;
if (ref instanceof String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
nmHostAddr_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
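// NOTE (editorial, not generated by protoc): string fields are stored as a
// java.lang.Object that is either a String or a ByteString. The first call to
// getNmHostAddr() decodes UTF-8 and caches the String form only when the bytes
// are valid UTF-8 (a corrupt payload keeps its raw bytes), while
// getNmHostAddrBytes() caches the encoded form symmetrically. Later calls on
// either side are then cheap reference returns.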
/**
* <code>optional string nmHostAddr = 2;</code>
* @param value The nmHostAddr to set.
* @return This builder for chaining.
*/
public Builder setNmHostAddr(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
nmHostAddr_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
* <code>optional string nmHostAddr = 2;</code>
* @return This builder for chaining.
*/
public Builder clearNmHostAddr() {
nmHostAddr_ = getDefaultInstance().getNmHostAddr();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
* <code>optional string nmHostAddr = 2;</code>
* @param value The bytes for nmHostAddr to set.
* @return This builder for chaining.
*/
public Builder setNmHostAddrBytes(
org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
nmHostAddr_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
private java.lang.Object appSubmitter_ = "";
/**
* <code>optional string appSubmitter = 3;</code>
* @return Whether the appSubmitter field is set.
*/
public boolean hasAppSubmitter() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
* <code>optional string appSubmitter = 3;</code>
* @return The appSubmitter.
*/
public java.lang.String getAppSubmitter() {
java.lang.Object ref = appSubmitter_;
if (!(ref instanceof java.lang.String)) {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
appSubmitter_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <code>optional string appSubmitter = 3;</code>
* @return The bytes for appSubmitter.
*/
public org.apache.hadoop.thirdparty.protobuf.ByteString
getAppSubmitterBytes() {
java.lang.Object ref = appSubmitter_;
if (ref instanceof String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
appSubmitter_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
/**
* <code>optional string appSubmitter = 3;</code>
* @param value The appSubmitter to set.
* @return This builder for chaining.
*/
public Builder setAppSubmitter(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
appSubmitter_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
* <code>optional string appSubmitter = 3;</code>
* @return This builder for chaining.
*/
public Builder clearAppSubmitter() {
appSubmitter_ = getDefaultInstance().getAppSubmitter();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
return this;
}
/**
* <code>optional string appSubmitter = 3;</code>
* @param value The bytes for appSubmitter to set.
* @return This builder for chaining.
*/
public Builder setAppSubmitterBytes(
org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
appSubmitter_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto resource_;
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> resourceBuilder_;
/**
* <code>optional .hadoop.yarn.ResourceProto resource = 4;</code>
* @return Whether the resource field is set.
*/
public boolean hasResource() {
return ((bitField0_ & 0x00000008) != 0);
}
/**
* <code>optional .hadoop.yarn.ResourceProto resource = 4;</code>
* @return The resource.
*/
public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getResource() {
if (resourceBuilder_ == null) {
return resource_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : resource_;
} else {
return resourceBuilder_.getMessage();
}
}
/**
* <code>optional .hadoop.yarn.ResourceProto resource = 4;</code>
*/
public Builder setResource(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) {
if (resourceBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
resource_ = value;
} else {
resourceBuilder_.setMessage(value);
}
bitField0_ |= 0x00000008;
onChanged();
return this;
}
/**
* <code>optional .hadoop.yarn.ResourceProto resource = 4;</code>
*/
public Builder setResource(
org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder builderForValue) {
if (resourceBuilder_ == null) {
resource_ = builderForValue.build();
} else {
resourceBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000008;
onChanged();
return this;
}
/**
* <code>optional .hadoop.yarn.ResourceProto resource = 4;</code>
*/
public Builder mergeResource(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) {
if (resourceBuilder_ == null) {
if (((bitField0_ & 0x00000008) != 0) &&
resource_ != null &&
resource_ != org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance()) {
getResourceBuilder().mergeFrom(value);
} else {
resource_ = value;
}
} else {
resourceBuilder_.mergeFrom(value);
}
if (resource_ != null) {
bitField0_ |= 0x00000008;
onChanged();
}
return this;
}
/**
* <code>optional .hadoop.yarn.ResourceProto resource = 4;</code>
*/
public Builder clearResource() {
bitField0_ = (bitField0_ & ~0x00000008);
resource_ = null;
if (resourceBuilder_ != null) {
resourceBuilder_.dispose();
resourceBuilder_ = null;
}
onChanged();
return this;
}
/**
* <code>optional .hadoop.yarn.ResourceProto resource = 4;</code>
*/
public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder getResourceBuilder() {
bitField0_ |= 0x00000008;
onChanged();
return getResourceFieldBuilder().getBuilder();
}
/**
* <code>optional .hadoop.yarn.ResourceProto resource = 4;</code>
*/
public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getResourceOrBuilder() {
if (resourceBuilder_ != null) {
return resourceBuilder_.getMessageOrBuilder();
} else {
return resource_ == null ?
org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : resource_;
}
}
/**
* <code>optional .hadoop.yarn.ResourceProto resource = 4;</code>
*/
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder>
getResourceFieldBuilder() {
if (resourceBuilder_ == null) {
resourceBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder>(
getResource(),
getParentForChildren(),
isClean());
resource_ = null;
}
return resourceBuilder_;
}
private long expiryTimeStamp_ ;
/**
* <code>optional int64 expiryTimeStamp = 5;</code>
* @return Whether the expiryTimeStamp field is set.
*/
@java.lang.Override
public boolean hasExpiryTimeStamp() {
return ((bitField0_ & 0x00000010) != 0);
}
/**
* <code>optional int64 expiryTimeStamp = 5;</code>
* @return The expiryTimeStamp.
*/
@java.lang.Override
public long getExpiryTimeStamp() {
return expiryTimeStamp_;
}
/**
* <code>optional int64 expiryTimeStamp = 5;</code>
* @param value The expiryTimeStamp to set.
* @return This builder for chaining.
*/
public Builder setExpiryTimeStamp(long value) {
expiryTimeStamp_ = value;
bitField0_ |= 0x00000010;
onChanged();
return this;
}
/**
* <code>optional int64 expiryTimeStamp = 5;</code>
* @return This builder for chaining.
*/
public Builder clearExpiryTimeStamp() {
bitField0_ = (bitField0_ & ~0x00000010);
expiryTimeStamp_ = 0L;
onChanged();
return this;
}
private int masterKeyId_ = -1;
/**
* <code>optional int32 masterKeyId = 6 [default = -1];</code>
* @return Whether the masterKeyId field is set.
*/
@java.lang.Override
public boolean hasMasterKeyId() {
return ((bitField0_ & 0x00000020) != 0);
}
/**
* <code>optional int32 masterKeyId = 6 [default = -1];</code>
* @return The masterKeyId.
*/
@java.lang.Override
public int getMasterKeyId() {
return masterKeyId_;
}
/**
* <code>optional int32 masterKeyId = 6 [default = -1];</code>
* @param value The masterKeyId to set.
* @return This builder for chaining.
*/
public Builder setMasterKeyId(int value) {
masterKeyId_ = value;
bitField0_ |= 0x00000020;
onChanged();
return this;
}
/**
* <code>optional int32 masterKeyId = 6 [default = -1];</code>
* @return This builder for chaining.
*/
public Builder clearMasterKeyId() {
bitField0_ = (bitField0_ & ~0x00000020);
masterKeyId_ = -1;
onChanged();
return this;
}
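// NOTE (editorial, not generated by protoc): masterKeyId carries an explicit
// proto2 default of -1, so clearMasterKeyId() restores -1 rather than 0 and
// drops the has-bit. hasMasterKeyId() is what distinguishes "explicitly set
// to -1" from "never set"; the getter alone cannot tell the two apart.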
private long rmIdentifier_ ;
/**
* <code>optional int64 rmIdentifier = 7;</code>
* @return Whether the rmIdentifier field is set.
*/
@java.lang.Override
public boolean hasRmIdentifier() {
return ((bitField0_ & 0x00000040) != 0);
}
/**
* <code>optional int64 rmIdentifier = 7;</code>
* @return The rmIdentifier.
*/
@java.lang.Override
public long getRmIdentifier() {
return rmIdentifier_;
}
/**
* <code>optional int64 rmIdentifier = 7;</code>
* @param value The rmIdentifier to set.
* @return This builder for chaining.
*/
public Builder setRmIdentifier(long value) {
rmIdentifier_ = value;
bitField0_ |= 0x00000040;
onChanged();
return this;
}
/**
* <code>optional int64 rmIdentifier = 7;</code>
* @return This builder for chaining.
*/
public Builder clearRmIdentifier() {
bitField0_ = (bitField0_ & ~0x00000040);
rmIdentifier_ = 0L;
onChanged();
return this;
}
private org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto priority_;
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder> priorityBuilder_;
/**
* <code>optional .hadoop.yarn.PriorityProto priority = 8;</code>
* @return Whether the priority field is set.
*/
public boolean hasPriority() {
return ((bitField0_ & 0x00000080) != 0);
}
/**
* <code>optional .hadoop.yarn.PriorityProto priority = 8;</code>
* @return The priority.
*/
public org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto getPriority() {
if (priorityBuilder_ == null) {
return priority_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance() : priority_;
} else {
return priorityBuilder_.getMessage();
}
}
/**
* <code>optional .hadoop.yarn.PriorityProto priority = 8;</code>
*/
public Builder setPriority(org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto value) {
if (priorityBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
priority_ = value;
} else {
priorityBuilder_.setMessage(value);
}
bitField0_ |= 0x00000080;
onChanged();
return this;
}
/**
* <code>optional .hadoop.yarn.PriorityProto priority = 8;</code>
*/
public Builder setPriority(
org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder builderForValue) {
if (priorityBuilder_ == null) {
priority_ = builderForValue.build();
} else {
priorityBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000080;
onChanged();
return this;
}
/**
* <code>optional .hadoop.yarn.PriorityProto priority = 8;</code>
*/
public Builder mergePriority(org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto value) {
if (priorityBuilder_ == null) {
if (((bitField0_ & 0x00000080) != 0) &&
priority_ != null &&
priority_ != org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance()) {
getPriorityBuilder().mergeFrom(value);
} else {
priority_ = value;
}
} else {
priorityBuilder_.mergeFrom(value);
}
if (priority_ != null) {
bitField0_ |= 0x00000080;
onChanged();
}
return this;
}
/**
* <code>optional .hadoop.yarn.PriorityProto priority = 8;</code>
*/
public Builder clearPriority() {
bitField0_ = (bitField0_ & ~0x00000080);
priority_ = null;
if (priorityBuilder_ != null) {
priorityBuilder_.dispose();
priorityBuilder_ = null;
}
onChanged();
return this;
}
/**
* <code>optional .hadoop.yarn.PriorityProto priority = 8;</code>
*/
public org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder getPriorityBuilder() {
bitField0_ |= 0x00000080;
onChanged();
return getPriorityFieldBuilder().getBuilder();
}
/**
* <code>optional .hadoop.yarn.PriorityProto priority = 8;</code>
*/
public org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder getPriorityOrBuilder() {
if (priorityBuilder_ != null) {
return priorityBuilder_.getMessageOrBuilder();
} else {
return priority_ == null ?
org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance() : priority_;
}
}
/**
* <code>optional .hadoop.yarn.PriorityProto priority = 8;</code>
*/
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder>
getPriorityFieldBuilder() {
if (priorityBuilder_ == null) {
priorityBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder>(
getPriority(),
getParentForChildren(),
isClean());
priority_ = null;
}
return priorityBuilder_;
}
private long creationTime_ ;
/**
* <code>optional int64 creationTime = 9;</code>
* @return Whether the creationTime field is set.
*/
@java.lang.Override
public boolean hasCreationTime() {
return ((bitField0_ & 0x00000100) != 0);
}
/**
* <code>optional int64 creationTime = 9;</code>
* @return The creationTime.
*/
@java.lang.Override
public long getCreationTime() {
return creationTime_;
}
/**
* <code>optional int64 creationTime = 9;</code>
* @param value The creationTime to set.
* @return This builder for chaining.
*/
public Builder setCreationTime(long value) {
creationTime_ = value;
bitField0_ |= 0x00000100;
onChanged();
return this;
}
/**
* <code>optional int64 creationTime = 9;</code>
* @return This builder for chaining.
*/
public Builder clearCreationTime() {
bitField0_ = (bitField0_ & ~0x00000100);
creationTime_ = 0L;
onChanged();
return this;
}
private org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto logAggregationContext_;
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto, org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProtoOrBuilder> logAggregationContextBuilder_;
/**
* <code>optional .hadoop.yarn.LogAggregationContextProto logAggregationContext = 10;</code>
* @return Whether the logAggregationContext field is set.
*/
public boolean hasLogAggregationContext() {
return ((bitField0_ & 0x00000200) != 0);
}
/**
* <code>optional .hadoop.yarn.LogAggregationContextProto logAggregationContext = 10;</code>
* @return The logAggregationContext.
*/
public org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto getLogAggregationContext() {
if (logAggregationContextBuilder_ == null) {
return logAggregationContext_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto.getDefaultInstance() : logAggregationContext_;
} else {
return logAggregationContextBuilder_.getMessage();
}
}
/**
* <code>optional .hadoop.yarn.LogAggregationContextProto logAggregationContext = 10;</code>
*/
public Builder setLogAggregationContext(org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto value) {
if (logAggregationContextBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
logAggregationContext_ = value;
} else {
logAggregationContextBuilder_.setMessage(value);
}
bitField0_ |= 0x00000200;
onChanged();
return this;
}
/**
* <code>optional .hadoop.yarn.LogAggregationContextProto logAggregationContext = 10;</code>
*/
public Builder setLogAggregationContext(
org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto.Builder builderForValue) {
if (logAggregationContextBuilder_ == null) {
logAggregationContext_ = builderForValue.build();
} else {
logAggregationContextBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000200;
onChanged();
return this;
}
/**
* <code>optional .hadoop.yarn.LogAggregationContextProto logAggregationContext = 10;</code>
*/
public Builder mergeLogAggregationContext(org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto value) {
if (logAggregationContextBuilder_ == null) {
if (((bitField0_ & 0x00000200) != 0) &&
logAggregationContext_ != null &&
logAggregationContext_ != org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto.getDefaultInstance()) {
getLogAggregationContextBuilder().mergeFrom(value);
} else {
logAggregationContext_ = value;
}
} else {
logAggregationContextBuilder_.mergeFrom(value);
}
if (logAggregationContext_ != null) {
bitField0_ |= 0x00000200;
onChanged();
}
return this;
}
/**
* <code>optional .hadoop.yarn.LogAggregationContextProto logAggregationContext = 10;</code>
*/
public Builder clearLogAggregationContext() {
bitField0_ = (bitField0_ & ~0x00000200);
logAggregationContext_ = null;
if (logAggregationContextBuilder_ != null) {
logAggregationContextBuilder_.dispose();
logAggregationContextBuilder_ = null;
}
onChanged();
return this;
}
/**
* <code>optional .hadoop.yarn.LogAggregationContextProto logAggregationContext = 10;</code>
*/
public org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto.Builder getLogAggregationContextBuilder() {
bitField0_ |= 0x00000200;
onChanged();
return getLogAggregationContextFieldBuilder().getBuilder();
}
/**
* <code>optional .hadoop.yarn.LogAggregationContextProto logAggregationContext = 10;</code>
*/
public org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProtoOrBuilder getLogAggregationContextOrBuilder() {
if (logAggregationContextBuilder_ != null) {
return logAggregationContextBuilder_.getMessageOrBuilder();
} else {
return logAggregationContext_ == null ?
org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto.getDefaultInstance() : logAggregationContext_;
}
}
/**
* <code>optional .hadoop.yarn.LogAggregationContextProto logAggregationContext = 10;</code>
*/
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto, org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProtoOrBuilder>
getLogAggregationContextFieldBuilder() {
if (logAggregationContextBuilder_ == null) {
logAggregationContextBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto, org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProtoOrBuilder>(
getLogAggregationContext(),
getParentForChildren(),
isClean());
logAggregationContext_ = null;
}
return logAggregationContextBuilder_;
}
private java.lang.Object nodeLabelExpression_ = "";
/**
* <code>optional string nodeLabelExpression = 11;</code>
* @return Whether the nodeLabelExpression field is set.
*/
public boolean hasNodeLabelExpression() {
return ((bitField0_ & 0x00000400) != 0);
}
/**
* <code>optional string nodeLabelExpression = 11;</code>
* @return The nodeLabelExpression.
*/
public java.lang.String getNodeLabelExpression() {
java.lang.Object ref = nodeLabelExpression_;
if (!(ref instanceof java.lang.String)) {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
nodeLabelExpression_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <code>optional string nodeLabelExpression = 11;</code>
* @return The bytes for nodeLabelExpression.
*/
public org.apache.hadoop.thirdparty.protobuf.ByteString
getNodeLabelExpressionBytes() {
java.lang.Object ref = nodeLabelExpression_;
if (ref instanceof String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
nodeLabelExpression_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
/**
* <code>optional string nodeLabelExpression = 11;</code>
* @param value The nodeLabelExpression to set.
* @return This builder for chaining.
*/
public Builder setNodeLabelExpression(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
nodeLabelExpression_ = value;
bitField0_ |= 0x00000400;
onChanged();
return this;
}
/**
* <code>optional string nodeLabelExpression = 11;</code>
* @return This builder for chaining.
*/
public Builder clearNodeLabelExpression() {
nodeLabelExpression_ = getDefaultInstance().getNodeLabelExpression();
bitField0_ = (bitField0_ & ~0x00000400);
onChanged();
return this;
}
/**
* <code>optional string nodeLabelExpression = 11;</code>
* @param value The bytes for nodeLabelExpression to set.
* @return This builder for chaining.
*/
public Builder setNodeLabelExpressionBytes(
org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
nodeLabelExpression_ = value;
bitField0_ |= 0x00000400;
onChanged();
return this;
}
private int containerType_ = 1;
/**
* <code>optional .hadoop.yarn.ContainerTypeProto containerType = 12;</code>
* @return Whether the containerType field is set.
*/
@java.lang.Override public boolean hasContainerType() {
return ((bitField0_ & 0x00000800) != 0);
}
/**
* <code>optional .hadoop.yarn.ContainerTypeProto containerType = 12;</code>
* @return The containerType.
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.ContainerTypeProto getContainerType() {
org.apache.hadoop.yarn.proto.YarnProtos.ContainerTypeProto result = org.apache.hadoop.yarn.proto.YarnProtos.ContainerTypeProto.forNumber(containerType_);
return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerTypeProto.APPLICATION_MASTER : result;
}
/**
* <code>optional .hadoop.yarn.ContainerTypeProto containerType = 12;</code>
* @param value The containerType to set.
* @return This builder for chaining.
*/
public Builder setContainerType(org.apache.hadoop.yarn.proto.YarnProtos.ContainerTypeProto value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000800;
containerType_ = value.getNumber();
onChanged();
return this;
}
/**
* <code>optional .hadoop.yarn.ContainerTypeProto containerType = 12;</code>
* @return This builder for chaining.
*/
public Builder clearContainerType() {
bitField0_ = (bitField0_ & ~0x00000800);
containerType_ = 1;
onChanged();
return this;
}
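// NOTE (editorial, not generated by protoc): enum fields are stored as the raw
// wire number. getContainerType() resolves it with forNumber() and falls back
// to APPLICATION_MASTER (number 1, the field's default) if the stored number
// no longer maps to a known constant, e.g. a value set reflectively from a
// newer schema.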
private int executionType_ = 1;
/**
* <code>optional .hadoop.yarn.ExecutionTypeProto executionType = 13 [default = GUARANTEED];</code>
* @return Whether the executionType field is set.
*/
@java.lang.Override public boolean hasExecutionType() {
return ((bitField0_ & 0x00001000) != 0);
}
/**
* <code>optional .hadoop.yarn.ExecutionTypeProto executionType = 13 [default = GUARANTEED];</code>
* @return The executionType.
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto getExecutionType() {
org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto result = org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto.forNumber(executionType_);
return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto.GUARANTEED : result;
}
/**
* <code>optional .hadoop.yarn.ExecutionTypeProto executionType = 13 [default = GUARANTEED];</code>
* @param value The executionType to set.
* @return This builder for chaining.
*/
public Builder setExecutionType(org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00001000;
executionType_ = value.getNumber();
onChanged();
return this;
}
/**
* <code>optional .hadoop.yarn.ExecutionTypeProto executionType = 13 [default = GUARANTEED];</code>
* @return This builder for chaining.
*/
public Builder clearExecutionType() {
bitField0_ = (bitField0_ & ~0x00001000);
executionType_ = 1;
onChanged();
return this;
}
private int version_ ;
/**
* <code>optional int32 version = 14 [default = 0];</code>
* @return Whether the version field is set.
*/
@java.lang.Override
public boolean hasVersion() {
return ((bitField0_ & 0x00002000) != 0);
}
/**
* <code>optional int32 version = 14 [default = 0];</code>
* @return The version.
*/
@java.lang.Override
public int getVersion() {
return version_;
}
/**
* <code>optional int32 version = 14 [default = 0];</code>
* @param value The version to set.
* @return This builder for chaining.
*/
public Builder setVersion(int value) {
version_ = value;
bitField0_ |= 0x00002000;
onChanged();
return this;
}
/**
* <code>optional int32 version = 14 [default = 0];</code>
* @return This builder for chaining.
*/
public Builder clearVersion() {
bitField0_ = (bitField0_ & ~0x00002000);
version_ = 0;
onChanged();
return this;
}
private long allocationRequestId_ = -1L;
/**
* <code>optional int64 allocation_request_id = 15 [default = -1];</code>
* @return Whether the allocationRequestId field is set.
*/
@java.lang.Override
public boolean hasAllocationRequestId() {
return ((bitField0_ & 0x00004000) != 0);
}
/**
* <code>optional int64 allocation_request_id = 15 [default = -1];</code>
* @return The allocationRequestId.
*/
@java.lang.Override
public long getAllocationRequestId() {
return allocationRequestId_;
}
/**
* <code>optional int64 allocation_request_id = 15 [default = -1];</code>
* @param value The allocationRequestId to set.
* @return This builder for chaining.
*/
public Builder setAllocationRequestId(long value) {
allocationRequestId_ = value;
bitField0_ |= 0x00004000;
onChanged();
return this;
}
/**
* <code>optional int64 allocation_request_id = 15 [default = -1];</code>
* @return This builder for chaining.
*/
public Builder clearAllocationRequestId() {
bitField0_ = (bitField0_ & ~0x00004000);
allocationRequestId_ = -1L;
onChanged();
return this;
}
private org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList allocationTags_ =
org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
private void ensureAllocationTagsIsMutable() {
if (!allocationTags_.isModifiable()) {
allocationTags_ = new org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList(allocationTags_);
}
bitField0_ |= 0x00008000;
}
/**
* <code>repeated string allocation_tags = 16;</code>
* @return A list containing the allocationTags.
*/
public org.apache.hadoop.thirdparty.protobuf.ProtocolStringList
getAllocationTagsList() {
allocationTags_.makeImmutable();
return allocationTags_;
}
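// NOTE (editorial, not generated by protoc): the repeated string field uses a
// copy-on-write scheme. ensureAllocationTagsIsMutable() copies the backing
// LazyStringArrayList only when it is still the immutable list shared with a
// previously built message, and getAllocationTagsList() re-freezes the list so
// the returned view cannot be mutated behind the builder's back.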
/**
* <code>repeated string allocation_tags = 16;</code>
* @return The count of allocationTags.
*/
public int getAllocationTagsCount() {
return allocationTags_.size();
}
/**
* <code>repeated string allocation_tags = 16;</code>
* @param index The index of the element to return.
* @return The allocationTags at the given index.
*/
public java.lang.String getAllocationTags(int index) {
return allocationTags_.get(index);
}
/**
* <code>repeated string allocation_tags = 16;</code>
* @param index The index of the value to return.
* @return The bytes of the allocationTags at the given index.
*/
public org.apache.hadoop.thirdparty.protobuf.ByteString
getAllocationTagsBytes(int index) {
return allocationTags_.getByteString(index);
}
/**
* <code>repeated string allocation_tags = 16;</code>
* @param index The index to set the value at.
* @param value The allocationTags to set.
* @return This builder for chaining.
*/
public Builder setAllocationTags(
int index, java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
ensureAllocationTagsIsMutable();
allocationTags_.set(index, value);
bitField0_ |= 0x00008000;
onChanged();
return this;
}
/**
* <code>repeated string allocation_tags = 16;</code>
* @param value The allocationTags to add.
* @return This builder for chaining.
*/
public Builder addAllocationTags(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
ensureAllocationTagsIsMutable();
allocationTags_.add(value);
bitField0_ |= 0x00008000;
onChanged();
return this;
}
/**
* <code>repeated string allocation_tags = 16;</code>
* @param values The allocationTags to add.
* @return This builder for chaining.
*/
public Builder addAllAllocationTags(
java.lang.Iterable<java.lang.String> values) {
ensureAllocationTagsIsMutable();
org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
values, allocationTags_);
bitField0_ |= 0x00008000;
onChanged();
return this;
}
/**
* <code>repeated string allocation_tags = 16;</code>
* @return This builder for chaining.
*/
public Builder clearAllocationTags() {
allocationTags_ =
org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
bitField0_ = (bitField0_ & ~0x00008000);
onChanged();
return this;
}
/**
* <code>repeated string allocation_tags = 16;</code>
* @param value The bytes of the allocationTags to add.
* @return This builder for chaining.
*/
public Builder addAllocationTagsBytes(
org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
ensureAllocationTagsIsMutable();
allocationTags_.add(value);
bitField0_ |= 0x00008000;
onChanged();
return this;
}
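// NOTE (editorial, not generated by protoc): a hedged usage sketch for this
// builder, using only setters declared above; build() is assumed from the
// standard generated API:
//
//   ContainerTokenIdentifierProto token =
//       ContainerTokenIdentifierProto.newBuilder()
//           .setNmHostAddr("nm-host:45454")
//           .setAppSubmitter("alice")
//           .setExpiryTimeStamp(System.currentTimeMillis() + 600_000L)
//           .addAllocationTags("spark-executor")
//           .build();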
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.ContainerTokenIdentifierProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.ContainerTokenIdentifierProto)
private static final org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.ContainerTokenIdentifierProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.ContainerTokenIdentifierProto();
}
public static org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.ContainerTokenIdentifierProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<ContainerTokenIdentifierProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<ContainerTokenIdentifierProto>() {
@java.lang.Override
public ContainerTokenIdentifierProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
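// NOTE (editorial, not generated by protoc): PARSER is kept as a deprecated
// public field for source compatibility; callers are expected to go through
// parser() instead. parsePartialFrom() attaches the partially built message to
// any thrown InvalidProtocolBufferException, so a caller can still inspect
// whatever fields were decoded before the failure, e.g.:
//
//   // bytes is a byte[] holding a serialized token
//   ContainerTokenIdentifierProto p =
//       ContainerTokenIdentifierProto.parser().parseFrom(bytes);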
public static org.apache.hadoop.thirdparty.protobuf.Parser<ContainerTokenIdentifierProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<ContainerTokenIdentifierProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.ContainerTokenIdentifierProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
public interface ClientToAMTokenIdentifierProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.ClientToAMTokenIdentifierProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* <code>optional .hadoop.yarn.ApplicationAttemptIdProto appAttemptId = 1;</code>
* @return Whether the appAttemptId field is set.
*/
boolean hasAppAttemptId();
/**
* <code>optional .hadoop.yarn.ApplicationAttemptIdProto appAttemptId = 1;</code>
* @return The appAttemptId.
*/
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto getAppAttemptId();
/**
* <code>optional .hadoop.yarn.ApplicationAttemptIdProto appAttemptId = 1;</code>
*/
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder getAppAttemptIdOrBuilder();
/**
* <code>optional string clientName = 2;</code>
* @return Whether the clientName field is set.
*/
boolean hasClientName();
/**
* <code>optional string clientName = 2;</code>
* @return The clientName.
*/
java.lang.String getClientName();
/**
* <code>optional string clientName = 2;</code>
* @return The bytes for clientName.
*/
org.apache.hadoop.thirdparty.protobuf.ByteString
getClientNameBytes();
}
/**
* Protobuf type {@code hadoop.yarn.ClientToAMTokenIdentifierProto}
*/
public static final class ClientToAMTokenIdentifierProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.ClientToAMTokenIdentifierProto)
ClientToAMTokenIdentifierProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use ClientToAMTokenIdentifierProto.newBuilder() to construct.
private ClientToAMTokenIdentifierProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ClientToAMTokenIdentifierProto() {
clientName_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new ClientToAMTokenIdentifierProto();
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.internal_static_hadoop_yarn_ClientToAMTokenIdentifierProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.internal_static_hadoop_yarn_ClientToAMTokenIdentifierProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.ClientToAMTokenIdentifierProto.class, org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.ClientToAMTokenIdentifierProto.Builder.class);
}
private int bitField0_;
public static final int APPATTEMPTID_FIELD_NUMBER = 1;
private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto appAttemptId_;
/**
* <code>optional .hadoop.yarn.ApplicationAttemptIdProto appAttemptId = 1;</code>
* @return Whether the appAttemptId field is set.
*/
@java.lang.Override
public boolean hasAppAttemptId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* <code>optional .hadoop.yarn.ApplicationAttemptIdProto appAttemptId = 1;</code>
* @return The appAttemptId.
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto getAppAttemptId() {
return appAttemptId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.getDefaultInstance() : appAttemptId_;
}
/**
* <code>optional .hadoop.yarn.ApplicationAttemptIdProto appAttemptId = 1;</code>
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder getAppAttemptIdOrBuilder() {
return appAttemptId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.getDefaultInstance() : appAttemptId_;
}
public static final int CLIENTNAME_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object clientName_ = "";
/**
* <code>optional string clientName = 2;</code>
* @return Whether the clientName field is set.
*/
@java.lang.Override
public boolean hasClientName() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* <code>optional string clientName = 2;</code>
* @return The clientName.
*/
@java.lang.Override
public java.lang.String getClientName() {
java.lang.Object ref = clientName_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
clientName_ = s;
}
return s;
}
}
/**
* <code>optional string clientName = 2;</code>
* @return The bytes for clientName.
*/
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.ByteString
getClientNameBytes() {
java.lang.Object ref = clientName_;
if (ref instanceof java.lang.String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
clientName_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getAppAttemptId());
}
if (((bitField0_ & 0x00000002) != 0)) {
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 2, clientName_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(1, getAppAttemptId());
}
if (((bitField0_ & 0x00000002) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(2, clientName_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
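// NOTE (editorial, not generated by protoc): getSerializedSize() memoizes its
// result in memoizedSize, with -1 as the "not yet computed" sentinel. This is
// safe because a built message is immutable; the cached value is also what
// supplies the length prefix when this message is written as a nested field.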
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.ClientToAMTokenIdentifierProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.ClientToAMTokenIdentifierProto other = (org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.ClientToAMTokenIdentifierProto) obj;
if (hasAppAttemptId() != other.hasAppAttemptId()) return false;
if (hasAppAttemptId()) {
if (!getAppAttemptId()
.equals(other.getAppAttemptId())) return false;
}
if (hasClientName() != other.hasClientName()) return false;
if (hasClientName()) {
if (!getClientName()
.equals(other.getClientName())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasAppAttemptId()) {
hash = (37 * hash) + APPATTEMPTID_FIELD_NUMBER;
hash = (53 * hash) + getAppAttemptId().hashCode();
}
if (hasClientName()) {
hash = (37 * hash) + CLIENTNAME_FIELD_NUMBER;
hash = (53 * hash) + getClientName().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.ClientToAMTokenIdentifierProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.ClientToAMTokenIdentifierProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.ClientToAMTokenIdentifierProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.ClientToAMTokenIdentifierProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.ClientToAMTokenIdentifierProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.ClientToAMTokenIdentifierProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.ClientToAMTokenIdentifierProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.ClientToAMTokenIdentifierProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.ClientToAMTokenIdentifierProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.ClientToAMTokenIdentifierProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.ClientToAMTokenIdentifierProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.ClientToAMTokenIdentifierProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
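// NOTE (editorial, not generated by protoc): the parseFrom() family above
// covers ByteBuffer, ByteString, byte[], InputStream and CodedInputStream
// sources, each with and without an extension registry. A hedged round-trip
// sketch (toByteArray() is assumed from the standard Message API; it is not
// declared in this file):
//
//   byte[] wire = msg.toByteArray();
//   ClientToAMTokenIdentifierProto copy =
//       ClientToAMTokenIdentifierProto.parseFrom(wire);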
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.ClientToAMTokenIdentifierProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.ClientToAMTokenIdentifierProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.ClientToAMTokenIdentifierProto)
org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.ClientToAMTokenIdentifierProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.internal_static_hadoop_yarn_ClientToAMTokenIdentifierProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.internal_static_hadoop_yarn_ClientToAMTokenIdentifierProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.ClientToAMTokenIdentifierProto.class, org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.ClientToAMTokenIdentifierProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.ClientToAMTokenIdentifierProto.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getAppAttemptIdFieldBuilder();
}
}
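// NOTE (editorial, not generated by protoc): alwaysUseFieldBuilders is false
// in normal operation, so nested-message builders are created lazily on first
// access; the flag exists so the protobuf test suite can force the eager path
// and exercise both code shapes.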
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
appAttemptId_ = null;
if (appAttemptIdBuilder_ != null) {
appAttemptIdBuilder_.dispose();
appAttemptIdBuilder_ = null;
}
clientName_ = "";
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.internal_static_hadoop_yarn_ClientToAMTokenIdentifierProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.ClientToAMTokenIdentifierProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.ClientToAMTokenIdentifierProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.ClientToAMTokenIdentifierProto build() {
org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.ClientToAMTokenIdentifierProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.ClientToAMTokenIdentifierProto buildPartial() {
org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.ClientToAMTokenIdentifierProto result = new org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.ClientToAMTokenIdentifierProto(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartial0(org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.ClientToAMTokenIdentifierProto result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.appAttemptId_ = appAttemptIdBuilder_ == null
? appAttemptId_
: appAttemptIdBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.clientName_ = clientName_;
to_bitField0_ |= 0x00000002;
}
result.bitField0_ |= to_bitField0_;
}
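// NOTE (editorial, not generated by protoc): buildPartial0() copies only the
// fields whose bits are set in the builder's bitField0_ and ORs the matching
// has-bits into the result. buildPartial() never throws; build() adds the
// isInitialized() check on top, though for this message isInitialized() is
// always true because no field is required.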
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.ClientToAMTokenIdentifierProto) {
return mergeFrom((org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.ClientToAMTokenIdentifierProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.ClientToAMTokenIdentifierProto other) {
if (other == org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.ClientToAMTokenIdentifierProto.getDefaultInstance()) return this;
if (other.hasAppAttemptId()) {
mergeAppAttemptId(other.getAppAttemptId());
}
if (other.hasClientName()) {
clientName_ = other.clientName_;
bitField0_ |= 0x00000002;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
input.readMessage(
getAppAttemptIdFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
case 18: {
clientName_ = input.readBytes();
bitField0_ |= 0x00000002;
break;
} // case 18
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
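// NOTE (editorial, not generated by protoc): the switch above dispatches on
// the raw wire tag, computed as (fieldNumber << 3) | wireType. Case 10 is
// field 1 (appAttemptId) with wire type 2 (length-delimited) and case 18 is
// field 2 (clientName), also length-delimited; tag 0 marks a clean end of
// input, and anything else is routed to parseUnknownField() so unknown fields
// survive a round trip.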
private int bitField0_;
private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto appAttemptId_;
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder> appAttemptIdBuilder_;
/**
* <code>optional .hadoop.yarn.ApplicationAttemptIdProto appAttemptId = 1;</code>
* @return Whether the appAttemptId field is set.
*/
public boolean hasAppAttemptId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* <code>optional .hadoop.yarn.ApplicationAttemptIdProto appAttemptId = 1;</code>
* @return The appAttemptId.
*/
public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto getAppAttemptId() {
if (appAttemptIdBuilder_ == null) {
return appAttemptId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.getDefaultInstance() : appAttemptId_;
} else {
return appAttemptIdBuilder_.getMessage();
}
}
/**
* <code>optional .hadoop.yarn.ApplicationAttemptIdProto appAttemptId = 1;</code>
*/
public Builder setAppAttemptId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto value) {
if (appAttemptIdBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
appAttemptId_ = value;
} else {
appAttemptIdBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* <code>optional .hadoop.yarn.ApplicationAttemptIdProto appAttemptId = 1;</code>
*/
public Builder setAppAttemptId(
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder builderForValue) {
if (appAttemptIdBuilder_ == null) {
appAttemptId_ = builderForValue.build();
} else {
appAttemptIdBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* <code>optional .hadoop.yarn.ApplicationAttemptIdProto appAttemptId = 1;</code>
*/
public Builder mergeAppAttemptId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto value) {
if (appAttemptIdBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0) &&
appAttemptId_ != null &&
appAttemptId_ != org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.getDefaultInstance()) {
getAppAttemptIdBuilder().mergeFrom(value);
} else {
appAttemptId_ = value;
}
} else {
appAttemptIdBuilder_.mergeFrom(value);
}
if (appAttemptId_ != null) {
bitField0_ |= 0x00000001;
onChanged();
}
return this;
}
/**
* <code>optional .hadoop.yarn.ApplicationAttemptIdProto appAttemptId = 1;</code>
*/
public Builder clearAppAttemptId() {
bitField0_ = (bitField0_ & ~0x00000001);
appAttemptId_ = null;
if (appAttemptIdBuilder_ != null) {
appAttemptIdBuilder_.dispose();
appAttemptIdBuilder_ = null;
}
onChanged();
return this;
}
/**
* <code>optional .hadoop.yarn.ApplicationAttemptIdProto appAttemptId = 1;</code>
*/
public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder getAppAttemptIdBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getAppAttemptIdFieldBuilder().getBuilder();
}
/**
* <code>optional .hadoop.yarn.ApplicationAttemptIdProto appAttemptId = 1;</code>
*/
public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder getAppAttemptIdOrBuilder() {
if (appAttemptIdBuilder_ != null) {
return appAttemptIdBuilder_.getMessageOrBuilder();
} else {
return appAttemptId_ == null ?
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.getDefaultInstance() : appAttemptId_;
}
}
/**
* <code>optional .hadoop.yarn.ApplicationAttemptIdProto appAttemptId = 1;</code>
*/
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder>
getAppAttemptIdFieldBuilder() {
if (appAttemptIdBuilder_ == null) {
appAttemptIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder>(
getAppAttemptId(),
getParentForChildren(),
isClean());
appAttemptId_ = null;
}
return appAttemptIdBuilder_;
}
private java.lang.Object clientName_ = "";
/**
* <code>optional string clientName = 2;</code>
* @return Whether the clientName field is set.
*/
public boolean hasClientName() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* <code>optional string clientName = 2;</code>
* @return The clientName.
*/
public java.lang.String getClientName() {
java.lang.Object ref = clientName_;
if (!(ref instanceof java.lang.String)) {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
clientName_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <code>optional string clientName = 2;</code>
* @return The bytes for clientName.
*/
public org.apache.hadoop.thirdparty.protobuf.ByteString
getClientNameBytes() {
java.lang.Object ref = clientName_;
if (ref instanceof String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
clientName_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
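// Illustrative note (not part of the generated code): clientName_ holds either a
// String or a ByteString. getClientName() decodes a ByteString with toStringUtf8()
// and caches the resulting String only when the bytes are valid UTF-8 (invalid input
// returns the replacement-character string without caching); getClientNameBytes()
// performs the reverse conversion and caches the ByteString. Repeated reads in the
// common case therefore return the cached object with no further allocation.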
/**
* <code>optional string clientName = 2;</code>
* @param value The clientName to set.
* @return This builder for chaining.
*/
public Builder setClientName(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
clientName_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
* <code>optional string clientName = 2;</code>
* @return This builder for chaining.
*/
public Builder clearClientName() {
clientName_ = getDefaultInstance().getClientName();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
* <code>optional string clientName = 2;</code>
* @param value The bytes for clientName to set.
* @return This builder for chaining.
*/
public Builder setClientNameBytes(
org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
clientName_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
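// Illustrative usage sketch (values hypothetical, not part of the generated code):
//
//   ClientToAMTokenIdentifierProto ident = ClientToAMTokenIdentifierProto.newBuilder()
//       .setAppAttemptId(attemptId)   // an ApplicationAttemptIdProto built elsewhere
//       .setClientName("alice")
//       .build();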
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.ClientToAMTokenIdentifierProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.ClientToAMTokenIdentifierProto)
private static final org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.ClientToAMTokenIdentifierProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.ClientToAMTokenIdentifierProto();
}
public static org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.ClientToAMTokenIdentifierProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<ClientToAMTokenIdentifierProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<ClientToAMTokenIdentifierProto>() {
@java.lang.Override
public ClientToAMTokenIdentifierProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<ClientToAMTokenIdentifierProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<ClientToAMTokenIdentifierProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.ClientToAMTokenIdentifierProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
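// Illustrative sketch (not part of the generated code) of a wire round trip;
// parser() is the supported entry point, while the PARSER field itself is deprecated:
//
//   byte[] bytes = ident.toByteArray();
//   ClientToAMTokenIdentifierProto back =
//       ClientToAMTokenIdentifierProto.parser().parseFrom(bytes);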
public interface YARNDelegationTokenIdentifierProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.YARNDelegationTokenIdentifierProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* <code>optional string owner = 1;</code>
* @return Whether the owner field is set.
*/
boolean hasOwner();
/**
* <code>optional string owner = 1;</code>
* @return The owner.
*/
java.lang.String getOwner();
/**
* <code>optional string owner = 1;</code>
* @return The bytes for owner.
*/
org.apache.hadoop.thirdparty.protobuf.ByteString
getOwnerBytes();
/**
* <code>optional string renewer = 2;</code>
* @return Whether the renewer field is set.
*/
boolean hasRenewer();
/**
* <code>optional string renewer = 2;</code>
* @return The renewer.
*/
java.lang.String getRenewer();
/**
* <code>optional string renewer = 2;</code>
* @return The bytes for renewer.
*/
org.apache.hadoop.thirdparty.protobuf.ByteString
getRenewerBytes();
/**
* <code>optional string realUser = 3;</code>
* @return Whether the realUser field is set.
*/
boolean hasRealUser();
/**
* <code>optional string realUser = 3;</code>
* @return The realUser.
*/
java.lang.String getRealUser();
/**
* <code>optional string realUser = 3;</code>
* @return The bytes for realUser.
*/
org.apache.hadoop.thirdparty.protobuf.ByteString
getRealUserBytes();
/**
* <code>optional int64 issueDate = 4;</code>
* @return Whether the issueDate field is set.
*/
boolean hasIssueDate();
/**
* <code>optional int64 issueDate = 4;</code>
* @return The issueDate.
*/
long getIssueDate();
/**
* <code>optional int64 maxDate = 5;</code>
* @return Whether the maxDate field is set.
*/
boolean hasMaxDate();
/**
* <code>optional int64 maxDate = 5;</code>
* @return The maxDate.
*/
long getMaxDate();
/**
* <code>optional int32 sequenceNumber = 6;</code>
* @return Whether the sequenceNumber field is set.
*/
boolean hasSequenceNumber();
/**
* <code>optional int32 sequenceNumber = 6;</code>
* @return The sequenceNumber.
*/
int getSequenceNumber();
/**
* <code>optional int32 masterKeyId = 7;</code>
* @return Whether the masterKeyId field is set.
*/
boolean hasMasterKeyId();
/**
* <code>optional int32 masterKeyId = 7;</code>
* @return The masterKeyId.
*/
int getMasterKeyId();
}
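// Illustrative note (not part of the generated code): both
// YARNDelegationTokenIdentifierProto and its Builder implement this interface, so
// read-only code can accept either without forcing a build(). A hypothetical helper:
//
//   static String describeOwner(YARNDelegationTokenIdentifierProtoOrBuilder t) {
//     return t.hasOwner() ? t.getOwner() : "<unset>";
//   }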
/**
* Protobuf type {@code hadoop.yarn.YARNDelegationTokenIdentifierProto}
*/
public static final class YARNDelegationTokenIdentifierProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.YARNDelegationTokenIdentifierProto)
YARNDelegationTokenIdentifierProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use YARNDelegationTokenIdentifierProto.newBuilder() to construct.
private YARNDelegationTokenIdentifierProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private YARNDelegationTokenIdentifierProto() {
owner_ = "";
renewer_ = "";
realUser_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new YARNDelegationTokenIdentifierProto();
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.internal_static_hadoop_yarn_YARNDelegationTokenIdentifierProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.internal_static_hadoop_yarn_YARNDelegationTokenIdentifierProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.YARNDelegationTokenIdentifierProto.class, org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.YARNDelegationTokenIdentifierProto.Builder.class);
}
private int bitField0_;
public static final int OWNER_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object owner_ = "";
/**
* <code>optional string owner = 1;</code>
* @return Whether the owner field is set.
*/
@java.lang.Override
public boolean hasOwner() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* <code>optional string owner = 1;</code>
* @return The owner.
*/
@java.lang.Override
public java.lang.String getOwner() {
java.lang.Object ref = owner_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
owner_ = s;
}
return s;
}
}
/**
* <code>optional string owner = 1;</code>
* @return The bytes for owner.
*/
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.ByteString
getOwnerBytes() {
java.lang.Object ref = owner_;
if (ref instanceof java.lang.String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
owner_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
public static final int RENEWER_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object renewer_ = "";
/**
* <code>optional string renewer = 2;</code>
* @return Whether the renewer field is set.
*/
@java.lang.Override
public boolean hasRenewer() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* <code>optional string renewer = 2;</code>
* @return The renewer.
*/
@java.lang.Override
public java.lang.String getRenewer() {
java.lang.Object ref = renewer_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
renewer_ = s;
}
return s;
}
}
/**
* <code>optional string renewer = 2;</code>
* @return The bytes for renewer.
*/
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.ByteString
getRenewerBytes() {
java.lang.Object ref = renewer_;
if (ref instanceof java.lang.String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
renewer_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
public static final int REALUSER_FIELD_NUMBER = 3;
@SuppressWarnings("serial")
private volatile java.lang.Object realUser_ = "";
/**
* <code>optional string realUser = 3;</code>
* @return Whether the realUser field is set.
*/
@java.lang.Override
public boolean hasRealUser() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
* <code>optional string realUser = 3;</code>
* @return The realUser.
*/
@java.lang.Override
public java.lang.String getRealUser() {
java.lang.Object ref = realUser_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
realUser_ = s;
}
return s;
}
}
/**
* <code>optional string realUser = 3;</code>
* @return The bytes for realUser.
*/
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.ByteString
getRealUserBytes() {
java.lang.Object ref = realUser_;
if (ref instanceof java.lang.String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
realUser_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
public static final int ISSUEDATE_FIELD_NUMBER = 4;
private long issueDate_ = 0L;
/**
* <code>optional int64 issueDate = 4;</code>
* @return Whether the issueDate field is set.
*/
@java.lang.Override
public boolean hasIssueDate() {
return ((bitField0_ & 0x00000008) != 0);
}
/**
* <code>optional int64 issueDate = 4;</code>
* @return The issueDate.
*/
@java.lang.Override
public long getIssueDate() {
return issueDate_;
}
public static final int MAXDATE_FIELD_NUMBER = 5;
private long maxDate_ = 0L;
/**
* <code>optional int64 maxDate = 5;</code>
* @return Whether the maxDate field is set.
*/
@java.lang.Override
public boolean hasMaxDate() {
return ((bitField0_ & 0x00000010) != 0);
}
/**
* <code>optional int64 maxDate = 5;</code>
* @return The maxDate.
*/
@java.lang.Override
public long getMaxDate() {
return maxDate_;
}
public static final int SEQUENCENUMBER_FIELD_NUMBER = 6;
private int sequenceNumber_ = 0;
/**
* <code>optional int32 sequenceNumber = 6;</code>
* @return Whether the sequenceNumber field is set.
*/
@java.lang.Override
public boolean hasSequenceNumber() {
return ((bitField0_ & 0x00000020) != 0);
}
/**
* <code>optional int32 sequenceNumber = 6;</code>
* @return The sequenceNumber.
*/
@java.lang.Override
public int getSequenceNumber() {
return sequenceNumber_;
}
public static final int MASTERKEYID_FIELD_NUMBER = 7;
private int masterKeyId_ = 0;
/**
* <code>optional int32 masterKeyId = 7;</code>
* @return Whether the masterKeyId field is set.
*/
@java.lang.Override
public boolean hasMasterKeyId() {
return ((bitField0_ & 0x00000040) != 0);
}
/**
* <code>optional int32 masterKeyId = 7;</code>
* @return The masterKeyId.
*/
@java.lang.Override
public int getMasterKeyId() {
return masterKeyId_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
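// Illustrative note (not part of the generated code): memoizedIsInitialized is a
// three-state cache (-1 unknown, 0 false, 1 true). Since this message declares no
// required fields, the first call settles on 1 and every later call returns
// immediately without re-checking fields.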
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, owner_);
}
if (((bitField0_ & 0x00000002) != 0)) {
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 2, renewer_);
}
if (((bitField0_ & 0x00000004) != 0)) {
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 3, realUser_);
}
if (((bitField0_ & 0x00000008) != 0)) {
output.writeInt64(4, issueDate_);
}
if (((bitField0_ & 0x00000010) != 0)) {
output.writeInt64(5, maxDate_);
}
if (((bitField0_ & 0x00000020) != 0)) {
output.writeInt32(6, sequenceNumber_);
}
if (((bitField0_ & 0x00000040) != 0)) {
output.writeInt32(7, masterKeyId_);
}
getUnknownFields().writeTo(output);
}
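// Illustrative note (not part of the generated code) on the encoding above: every
// field is preceded by a varint tag of (field_number << 3) | wire_type. owner
// (field 1, length-delimited type 2) is tagged (1 << 3) | 2 = 10, and issueDate
// (field 4, varint type 0) is (4 << 3) | 0 = 32; these are exactly the case labels
// matched in Builder.mergeFrom further below.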
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, owner_);
}
if (((bitField0_ & 0x00000002) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(2, renewer_);
}
if (((bitField0_ & 0x00000004) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(3, realUser_);
}
if (((bitField0_ & 0x00000008) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeInt64Size(4, issueDate_);
}
if (((bitField0_ & 0x00000010) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeInt64Size(5, maxDate_);
}
if (((bitField0_ & 0x00000020) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeInt32Size(6, sequenceNumber_);
}
if (((bitField0_ & 0x00000040) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeInt32Size(7, masterKeyId_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.YARNDelegationTokenIdentifierProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.YARNDelegationTokenIdentifierProto other = (org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.YARNDelegationTokenIdentifierProto) obj;
if (hasOwner() != other.hasOwner()) return false;
if (hasOwner()) {
if (!getOwner()
.equals(other.getOwner())) return false;
}
if (hasRenewer() != other.hasRenewer()) return false;
if (hasRenewer()) {
if (!getRenewer()
.equals(other.getRenewer())) return false;
}
if (hasRealUser() != other.hasRealUser()) return false;
if (hasRealUser()) {
if (!getRealUser()
.equals(other.getRealUser())) return false;
}
if (hasIssueDate() != other.hasIssueDate()) return false;
if (hasIssueDate()) {
if (getIssueDate()
!= other.getIssueDate()) return false;
}
if (hasMaxDate() != other.hasMaxDate()) return false;
if (hasMaxDate()) {
if (getMaxDate()
!= other.getMaxDate()) return false;
}
if (hasSequenceNumber() != other.hasSequenceNumber()) return false;
if (hasSequenceNumber()) {
if (getSequenceNumber()
!= other.getSequenceNumber()) return false;
}
if (hasMasterKeyId() != other.hasMasterKeyId()) return false;
if (hasMasterKeyId()) {
if (getMasterKeyId()
!= other.getMasterKeyId()) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasOwner()) {
hash = (37 * hash) + OWNER_FIELD_NUMBER;
hash = (53 * hash) + getOwner().hashCode();
}
if (hasRenewer()) {
hash = (37 * hash) + RENEWER_FIELD_NUMBER;
hash = (53 * hash) + getRenewer().hashCode();
}
if (hasRealUser()) {
hash = (37 * hash) + REALUSER_FIELD_NUMBER;
hash = (53 * hash) + getRealUser().hashCode();
}
if (hasIssueDate()) {
hash = (37 * hash) + ISSUEDATE_FIELD_NUMBER;
hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
getIssueDate());
}
if (hasMaxDate()) {
hash = (37 * hash) + MAXDATE_FIELD_NUMBER;
hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
getMaxDate());
}
if (hasSequenceNumber()) {
hash = (37 * hash) + SEQUENCENUMBER_FIELD_NUMBER;
hash = (53 * hash) + getSequenceNumber();
}
if (hasMasterKeyId()) {
hash = (37 * hash) + MASTERKEYID_FIELD_NUMBER;
hash = (53 * hash) + getMasterKeyId();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
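// Illustrative note (not part of the generated code): equals() and hashCode() are
// presence-aware, so an unset optional field and a field explicitly set to its
// default value yield unequal messages. Internal.hashLong folds the 64-bit value
// into an int (high word XOR low word) so that issueDate and maxDate contribute all
// of their bits to the hash.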
public static org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.YARNDelegationTokenIdentifierProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.YARNDelegationTokenIdentifierProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.YARNDelegationTokenIdentifierProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.YARNDelegationTokenIdentifierProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.YARNDelegationTokenIdentifierProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.YARNDelegationTokenIdentifierProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.YARNDelegationTokenIdentifierProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.YARNDelegationTokenIdentifierProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.YARNDelegationTokenIdentifierProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.YARNDelegationTokenIdentifierProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.YARNDelegationTokenIdentifierProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.YARNDelegationTokenIdentifierProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
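// Illustrative sketch (not part of the generated code): the stream overloads differ
// in framing. parseFrom(InputStream) consumes the stream to EOF, while
// parseDelimitedFrom first reads a varint length prefix, which lets several messages
// share one stream:
//
//   token.writeDelimitedTo(out);   // length-prefixed write from MessageLite
//   YARNDelegationTokenIdentifierProto next =
//       YARNDelegationTokenIdentifierProto.parseDelimitedFrom(in);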
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.YARNDelegationTokenIdentifierProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
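// Illustrative copy-on-modify sketch (values hypothetical, not part of the generated
// code): toBuilder() copies the current field values, so a derived token never
// mutates the original:
//
//   YARNDelegationTokenIdentifierProto renewed = token.toBuilder()
//       .setMaxDate(token.getMaxDate() + 86_400_000L) // extend by one day
//       .build();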
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.YARNDelegationTokenIdentifierProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.YARNDelegationTokenIdentifierProto)
org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.YARNDelegationTokenIdentifierProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.internal_static_hadoop_yarn_YARNDelegationTokenIdentifierProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.internal_static_hadoop_yarn_YARNDelegationTokenIdentifierProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.YARNDelegationTokenIdentifierProto.class, org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.YARNDelegationTokenIdentifierProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.YARNDelegationTokenIdentifierProto.newBuilder()
private Builder() {
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
owner_ = "";
renewer_ = "";
realUser_ = "";
issueDate_ = 0L;
maxDate_ = 0L;
sequenceNumber_ = 0;
masterKeyId_ = 0;
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.internal_static_hadoop_yarn_YARNDelegationTokenIdentifierProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.YARNDelegationTokenIdentifierProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.YARNDelegationTokenIdentifierProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.YARNDelegationTokenIdentifierProto build() {
org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.YARNDelegationTokenIdentifierProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.YARNDelegationTokenIdentifierProto buildPartial() {
org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.YARNDelegationTokenIdentifierProto result = new org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.YARNDelegationTokenIdentifierProto(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartial0(org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.YARNDelegationTokenIdentifierProto result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.owner_ = owner_;
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.renewer_ = renewer_;
to_bitField0_ |= 0x00000002;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.realUser_ = realUser_;
to_bitField0_ |= 0x00000004;
}
if (((from_bitField0_ & 0x00000008) != 0)) {
result.issueDate_ = issueDate_;
to_bitField0_ |= 0x00000008;
}
if (((from_bitField0_ & 0x00000010) != 0)) {
result.maxDate_ = maxDate_;
to_bitField0_ |= 0x00000010;
}
if (((from_bitField0_ & 0x00000020) != 0)) {
result.sequenceNumber_ = sequenceNumber_;
to_bitField0_ |= 0x00000020;
}
if (((from_bitField0_ & 0x00000040) != 0)) {
result.masterKeyId_ = masterKeyId_;
to_bitField0_ |= 0x00000040;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.YARNDelegationTokenIdentifierProto) {
return mergeFrom((org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.YARNDelegationTokenIdentifierProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.YARNDelegationTokenIdentifierProto other) {
if (other == org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.YARNDelegationTokenIdentifierProto.getDefaultInstance()) return this;
if (other.hasOwner()) {
owner_ = other.owner_;
bitField0_ |= 0x00000001;
onChanged();
}
if (other.hasRenewer()) {
renewer_ = other.renewer_;
bitField0_ |= 0x00000002;
onChanged();
}
if (other.hasRealUser()) {
realUser_ = other.realUser_;
bitField0_ |= 0x00000004;
onChanged();
}
if (other.hasIssueDate()) {
setIssueDate(other.getIssueDate());
}
if (other.hasMaxDate()) {
setMaxDate(other.getMaxDate());
}
if (other.hasSequenceNumber()) {
setSequenceNumber(other.getSequenceNumber());
}
if (other.hasMasterKeyId()) {
setMasterKeyId(other.getMasterKeyId());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
owner_ = input.readBytes();
bitField0_ |= 0x00000001;
break;
} // case 10
case 18: {
renewer_ = input.readBytes();
bitField0_ |= 0x00000002;
break;
} // case 18
case 26: {
realUser_ = input.readBytes();
bitField0_ |= 0x00000004;
break;
} // case 26
case 32: {
issueDate_ = input.readInt64();
bitField0_ |= 0x00000008;
break;
} // case 32
case 40: {
maxDate_ = input.readInt64();
bitField0_ |= 0x00000010;
break;
} // case 40
case 48: {
sequenceNumber_ = input.readInt32();
bitField0_ |= 0x00000020;
break;
} // case 48
case 56: {
masterKeyId_ = input.readInt32();
bitField0_ |= 0x00000040;
break;
} // case 56
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
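// Illustrative note (not part of the generated code) on the switch above: each case
// label is a precomputed tag, e.g. case 10 == (1 << 3) | 2 for owner
// (length-delimited) and case 56 == (7 << 3) | 0 for masterKeyId (varint).
// readTag() returning 0 signals end of input; any other unrecognized tag goes to
// parseUnknownField, which stores the field for lossless round-tripping and returns
// false only for an end-group marker.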
private int bitField0_;
private java.lang.Object owner_ = "";
/**
* <code>optional string owner = 1;</code>
* @return Whether the owner field is set.
*/
public boolean hasOwner() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* <code>optional string owner = 1;</code>
* @return The owner.
*/
public java.lang.String getOwner() {
java.lang.Object ref = owner_;
if (!(ref instanceof java.lang.String)) {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
owner_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <code>optional string owner = 1;</code>
* @return The bytes for owner.
*/
public org.apache.hadoop.thirdparty.protobuf.ByteString
getOwnerBytes() {
java.lang.Object ref = owner_;
if (ref instanceof String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
owner_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
/**
* <code>optional string owner = 1;</code>
* @param value The owner to set.
* @return This builder for chaining.
*/
public Builder setOwner(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
owner_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* <code>optional string owner = 1;</code>
* @return This builder for chaining.
*/
public Builder clearOwner() {
owner_ = getDefaultInstance().getOwner();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
* <code>optional string owner = 1;</code>
* @param value The bytes for owner to set.
* @return This builder for chaining.
*/
public Builder setOwnerBytes(
org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
owner_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private java.lang.Object renewer_ = "";
/**
* <code>optional string renewer = 2;</code>
* @return Whether the renewer field is set.
*/
public boolean hasRenewer() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* <code>optional string renewer = 2;</code>
* @return The renewer.
*/
public java.lang.String getRenewer() {
java.lang.Object ref = renewer_;
if (!(ref instanceof java.lang.String)) {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
renewer_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <code>optional string renewer = 2;</code>
* @return The bytes for renewer.
*/
public org.apache.hadoop.thirdparty.protobuf.ByteString
getRenewerBytes() {
java.lang.Object ref = renewer_;
if (ref instanceof String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
renewer_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
/**
* <code>optional string renewer = 2;</code>
* @param value The renewer to set.
* @return This builder for chaining.
*/
public Builder setRenewer(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
renewer_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
* <code>optional string renewer = 2;</code>
* @return This builder for chaining.
*/
public Builder clearRenewer() {
renewer_ = getDefaultInstance().getRenewer();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
* <code>optional string renewer = 2;</code>
* @param value The bytes for renewer to set.
* @return This builder for chaining.
*/
public Builder setRenewerBytes(
org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
renewer_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
private java.lang.Object realUser_ = "";
/**
* <code>optional string realUser = 3;</code>
* @return Whether the realUser field is set.
*/
public boolean hasRealUser() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
* <code>optional string realUser = 3;</code>
* @return The realUser.
*/
public java.lang.String getRealUser() {
java.lang.Object ref = realUser_;
if (!(ref instanceof java.lang.String)) {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
realUser_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <code>optional string realUser = 3;</code>
* @return The bytes for realUser.
*/
public org.apache.hadoop.thirdparty.protobuf.ByteString
getRealUserBytes() {
java.lang.Object ref = realUser_;
if (ref instanceof String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
realUser_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
/**
* <code>optional string realUser = 3;</code>
* @param value The realUser to set.
* @return This builder for chaining.
*/
public Builder setRealUser(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
realUser_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
* <code>optional string realUser = 3;</code>
* @return This builder for chaining.
*/
public Builder clearRealUser() {
realUser_ = getDefaultInstance().getRealUser();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
return this;
}
/**
* <code>optional string realUser = 3;</code>
* @param value The bytes for realUser to set.
* @return This builder for chaining.
*/
public Builder setRealUserBytes(
org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
realUser_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
private long issueDate_;
/**
* <code>optional int64 issueDate = 4;</code>
* @return Whether the issueDate field is set.
*/
@java.lang.Override
public boolean hasIssueDate() {
return ((bitField0_ & 0x00000008) != 0);
}
/**
* <code>optional int64 issueDate = 4;</code>
* @return The issueDate.
*/
@java.lang.Override
public long getIssueDate() {
return issueDate_;
}
/**
* <code>optional int64 issueDate = 4;</code>
* @param value The issueDate to set.
* @return This builder for chaining.
*/
public Builder setIssueDate(long value) {
issueDate_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
/**
* <code>optional int64 issueDate = 4;</code>
* @return This builder for chaining.
*/
public Builder clearIssueDate() {
bitField0_ = (bitField0_ & ~0x00000008);
issueDate_ = 0L;
onChanged();
return this;
}
private long maxDate_;
/**
* <code>optional int64 maxDate = 5;</code>
* @return Whether the maxDate field is set.
*/
@java.lang.Override
public boolean hasMaxDate() {
return ((bitField0_ & 0x00000010) != 0);
}
/**
* <code>optional int64 maxDate = 5;</code>
* @return The maxDate.
*/
@java.lang.Override
public long getMaxDate() {
return maxDate_;
}
/**
* <code>optional int64 maxDate = 5;</code>
* @param value The maxDate to set.
* @return This builder for chaining.
*/
public Builder setMaxDate(long value) {
maxDate_ = value;
bitField0_ |= 0x00000010;
onChanged();
return this;
}
/**
* <code>optional int64 maxDate = 5;</code>
* @return This builder for chaining.
*/
public Builder clearMaxDate() {
bitField0_ = (bitField0_ & ~0x00000010);
maxDate_ = 0L;
onChanged();
return this;
}
private int sequenceNumber_;
/**
* <code>optional int32 sequenceNumber = 6;</code>
* @return Whether the sequenceNumber field is set.
*/
@java.lang.Override
public boolean hasSequenceNumber() {
return ((bitField0_ & 0x00000020) != 0);
}
/**
* <code>optional int32 sequenceNumber = 6;</code>
* @return The sequenceNumber.
*/
@java.lang.Override
public int getSequenceNumber() {
return sequenceNumber_;
}
/**
* <code>optional int32 sequenceNumber = 6;</code>
* @param value The sequenceNumber to set.
* @return This builder for chaining.
*/
public Builder setSequenceNumber(int value) {
sequenceNumber_ = value;
bitField0_ |= 0x00000020;
onChanged();
return this;
}
/**
* <code>optional int32 sequenceNumber = 6;</code>
* @return This builder for chaining.
*/
public Builder clearSequenceNumber() {
bitField0_ = (bitField0_ & ~0x00000020);
sequenceNumber_ = 0;
onChanged();
return this;
}
private int masterKeyId_;
/**
* <code>optional int32 masterKeyId = 7;</code>
* @return Whether the masterKeyId field is set.
*/
@java.lang.Override
public boolean hasMasterKeyId() {
return ((bitField0_ & 0x00000040) != 0);
}
/**
* <code>optional int32 masterKeyId = 7;</code>
* @return The masterKeyId.
*/
@java.lang.Override
public int getMasterKeyId() {
return masterKeyId_;
}
/**
* <code>optional int32 masterKeyId = 7;</code>
* @param value The masterKeyId to set.
* @return This builder for chaining.
*/
public Builder setMasterKeyId(int value) {
masterKeyId_ = value;
bitField0_ |= 0x00000040;
onChanged();
return this;
}
/**
* <code>optional int32 masterKeyId = 7;</code>
* @return This builder for chaining.
*/
public Builder clearMasterKeyId() {
bitField0_ = (bitField0_ & ~0x00000040);
masterKeyId_ = 0;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.YARNDelegationTokenIdentifierProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.YARNDelegationTokenIdentifierProto)
private static final org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.YARNDelegationTokenIdentifierProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.YARNDelegationTokenIdentifierProto();
}
public static org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.YARNDelegationTokenIdentifierProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<YARNDelegationTokenIdentifierProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<YARNDelegationTokenIdentifierProto>() {
@java.lang.Override
public YARNDelegationTokenIdentifierProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<YARNDelegationTokenIdentifierProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<YARNDelegationTokenIdentifierProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.YARNDelegationTokenIdentifierProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
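// Illustrative end-to-end sketch (values hypothetical, not part of the generated
// code): build, serialize, reparse.
//
//   YARNDelegationTokenIdentifierProto token =
//       YARNDelegationTokenIdentifierProto.newBuilder()
//           .setOwner("alice")
//           .setRenewer("yarn")
//           .setIssueDate(System.currentTimeMillis())
//           .setMasterKeyId(42)
//           .build();
//   YARNDelegationTokenIdentifierProto parsed =
//       YARNDelegationTokenIdentifierProto.parseFrom(token.toByteArray());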
public interface DockerCredentialTokenIdentifierProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.DockerCredentialTokenIdentifierProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* <code>optional string registryUrl = 1;</code>
* @return Whether the registryUrl field is set.
*/
boolean hasRegistryUrl();
/**
* <code>optional string registryUrl = 1;</code>
* @return The registryUrl.
*/
java.lang.String getRegistryUrl();
/**
* <code>optional string registryUrl = 1;</code>
* @return The bytes for registryUrl.
*/
org.apache.hadoop.thirdparty.protobuf.ByteString
getRegistryUrlBytes();
/**
* <code>optional string applicationId = 2;</code>
* @return Whether the applicationId field is set.
*/
boolean hasApplicationId();
/**
* <code>optional string applicationId = 2;</code>
* @return The applicationId.
*/
java.lang.String getApplicationId();
/**
* <code>optional string applicationId = 2;</code>
* @return The bytes for applicationId.
*/
org.apache.hadoop.thirdparty.protobuf.ByteString
getApplicationIdBytes();
}
/**
* Protobuf type {@code hadoop.yarn.DockerCredentialTokenIdentifierProto}
*/
public static final class DockerCredentialTokenIdentifierProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.DockerCredentialTokenIdentifierProto)
DockerCredentialTokenIdentifierProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use DockerCredentialTokenIdentifierProto.newBuilder() to construct.
private DockerCredentialTokenIdentifierProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private DockerCredentialTokenIdentifierProto() {
registryUrl_ = "";
applicationId_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new DockerCredentialTokenIdentifierProto();
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.internal_static_hadoop_yarn_DockerCredentialTokenIdentifierProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.internal_static_hadoop_yarn_DockerCredentialTokenIdentifierProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.DockerCredentialTokenIdentifierProto.class, org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.DockerCredentialTokenIdentifierProto.Builder.class);
}
private int bitField0_;
public static final int REGISTRYURL_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object registryUrl_ = "";
/**
* <code>optional string registryUrl = 1;</code>
* @return Whether the registryUrl field is set.
*/
@java.lang.Override
public boolean hasRegistryUrl() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* <code>optional string registryUrl = 1;</code>
* @return The registryUrl.
*/
@java.lang.Override
public java.lang.String getRegistryUrl() {
java.lang.Object ref = registryUrl_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
registryUrl_ = s;
}
return s;
}
}
/**
* <code>optional string registryUrl = 1;</code>
* @return The bytes for registryUrl.
*/
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.ByteString
getRegistryUrlBytes() {
java.lang.Object ref = registryUrl_;
if (ref instanceof java.lang.String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
registryUrl_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
public static final int APPLICATIONID_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object applicationId_ = "";
/**
* <code>optional string applicationId = 2;</code>
* @return Whether the applicationId field is set.
*/
@java.lang.Override
public boolean hasApplicationId() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* <code>optional string applicationId = 2;</code>
* @return The applicationId.
*/
@java.lang.Override
public java.lang.String getApplicationId() {
java.lang.Object ref = applicationId_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
applicationId_ = s;
}
return s;
}
}
/**
* <code>optional string applicationId = 2;</code>
* @return The bytes for applicationId.
*/
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.ByteString
getApplicationIdBytes() {
java.lang.Object ref = applicationId_;
if (ref instanceof java.lang.String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
applicationId_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, registryUrl_);
}
if (((bitField0_ & 0x00000002) != 0)) {
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 2, applicationId_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, registryUrl_);
}
if (((bitField0_ & 0x00000002) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(2, applicationId_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.DockerCredentialTokenIdentifierProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.DockerCredentialTokenIdentifierProto other = (org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.DockerCredentialTokenIdentifierProto) obj;
if (hasRegistryUrl() != other.hasRegistryUrl()) return false;
if (hasRegistryUrl()) {
if (!getRegistryUrl()
.equals(other.getRegistryUrl())) return false;
}
if (hasApplicationId() != other.hasApplicationId()) return false;
if (hasApplicationId()) {
if (!getApplicationId()
.equals(other.getApplicationId())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasRegistryUrl()) {
hash = (37 * hash) + REGISTRYURL_FIELD_NUMBER;
hash = (53 * hash) + getRegistryUrl().hashCode();
}
if (hasApplicationId()) {
hash = (37 * hash) + APPLICATIONID_FIELD_NUMBER;
hash = (53 * hash) + getApplicationId().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.DockerCredentialTokenIdentifierProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.DockerCredentialTokenIdentifierProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.DockerCredentialTokenIdentifierProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.DockerCredentialTokenIdentifierProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.DockerCredentialTokenIdentifierProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.DockerCredentialTokenIdentifierProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.DockerCredentialTokenIdentifierProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.DockerCredentialTokenIdentifierProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.DockerCredentialTokenIdentifierProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.DockerCredentialTokenIdentifierProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.DockerCredentialTokenIdentifierProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.DockerCredentialTokenIdentifierProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.DockerCredentialTokenIdentifierProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.DockerCredentialTokenIdentifierProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.DockerCredentialTokenIdentifierProto)
org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.DockerCredentialTokenIdentifierProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.internal_static_hadoop_yarn_DockerCredentialTokenIdentifierProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.internal_static_hadoop_yarn_DockerCredentialTokenIdentifierProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.DockerCredentialTokenIdentifierProto.class, org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.DockerCredentialTokenIdentifierProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.DockerCredentialTokenIdentifierProto.newBuilder()
private Builder() {
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
registryUrl_ = "";
applicationId_ = "";
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.internal_static_hadoop_yarn_DockerCredentialTokenIdentifierProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.DockerCredentialTokenIdentifierProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.DockerCredentialTokenIdentifierProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.DockerCredentialTokenIdentifierProto build() {
org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.DockerCredentialTokenIdentifierProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.DockerCredentialTokenIdentifierProto buildPartial() {
org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.DockerCredentialTokenIdentifierProto result = new org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.DockerCredentialTokenIdentifierProto(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartial0(org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.DockerCredentialTokenIdentifierProto result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.registryUrl_ = registryUrl_;
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.applicationId_ = applicationId_;
to_bitField0_ |= 0x00000002;
}
result.bitField0_ |= to_bitField0_;
}
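// buildPartial0() copies only the fields whose presence bit is set in the
// builder's bitField0_ (0x1 = registryUrl, 0x2 = applicationId) and mirrors
// those bits into the result, so hasRegistryUrl()/hasApplicationId() report
// presence on the built message.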
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.DockerCredentialTokenIdentifierProto) {
return mergeFrom((org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.DockerCredentialTokenIdentifierProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.DockerCredentialTokenIdentifierProto other) {
if (other == org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.DockerCredentialTokenIdentifierProto.getDefaultInstance()) return this;
if (other.hasRegistryUrl()) {
registryUrl_ = other.registryUrl_;
bitField0_ |= 0x00000001;
onChanged();
}
if (other.hasApplicationId()) {
applicationId_ = other.applicationId_;
bitField0_ |= 0x00000002;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
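// Message-level merge uses proto2 semantics for singular fields: any field
// set on `other` overwrites this builder's value, unset fields are left
// untouched, and other's unknown fields are appended to this builder's set.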
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
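// A wire tag encodes (field_number << 3) | wire_type: 10 is field 1 and
// 18 is field 2 (both length-delimited strings); tag 0 marks end of input.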
switch (tag) {
case 0:
done = true;
break;
case 10: {
registryUrl_ = input.readBytes();
bitField0_ |= 0x00000001;
break;
} // case 10
case 18: {
applicationId_ = input.readBytes();
bitField0_ |= 0x00000002;
break;
} // case 18
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
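// Note that the streaming merge stores each string field as the raw
// ByteString returned by readBytes(); UTF-8 validation and decoding are
// deferred to the accessors below.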
private int bitField0_;
private java.lang.Object registryUrl_ = "";
/**
* <code>optional string registryUrl = 1;</code>
* @return Whether the registryUrl field is set.
*/
public boolean hasRegistryUrl() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* <code>optional string registryUrl = 1;</code>
* @return The registryUrl.
*/
public java.lang.String getRegistryUrl() {
java.lang.Object ref = registryUrl_;
if (!(ref instanceof java.lang.String)) {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
registryUrl_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
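// The accessor above implements the generated lazy-decode idiom: the field
// holds either a ByteString (fresh off the wire) or an already-decoded
// String. The decoded String is cached back into the field only when the
// bytes are valid UTF-8; invalid bytes are re-decoded (with replacement
// characters) on every call. getApplicationId() below behaves identically.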
/**
* <code>optional string registryUrl = 1;</code>
* @return The bytes for registryUrl.
*/
public org.apache.hadoop.thirdparty.protobuf.ByteString
getRegistryUrlBytes() {
java.lang.Object ref = registryUrl_;
if (ref instanceof String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
registryUrl_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
/**
* <code>optional string registryUrl = 1;</code>
* @param value The registryUrl to set.
* @return This builder for chaining.
*/
public Builder setRegistryUrl(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
registryUrl_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* <code>optional string registryUrl = 1;</code>
* @return This builder for chaining.
*/
public Builder clearRegistryUrl() {
registryUrl_ = getDefaultInstance().getRegistryUrl();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
* <code>optional string registryUrl = 1;</code>
* @param value The bytes for registryUrl to set.
* @return This builder for chaining.
*/
public Builder setRegistryUrlBytes(
org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
registryUrl_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private java.lang.Object applicationId_ = "";
/**
* <code>optional string applicationId = 2;</code>
* @return Whether the applicationId field is set.
*/
public boolean hasApplicationId() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* <code>optional string applicationId = 2;</code>
* @return The applicationId.
*/
public java.lang.String getApplicationId() {
java.lang.Object ref = applicationId_;
if (!(ref instanceof java.lang.String)) {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
applicationId_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <code>optional string applicationId = 2;</code>
* @return The bytes for applicationId.
*/
public org.apache.hadoop.thirdparty.protobuf.ByteString
getApplicationIdBytes() {
java.lang.Object ref = applicationId_;
if (ref instanceof String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
applicationId_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
/**
* <code>optional string applicationId = 2;</code>
* @param value The applicationId to set.
* @return This builder for chaining.
*/
public Builder setApplicationId(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
applicationId_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
* <code>optional string applicationId = 2;</code>
* @return This builder for chaining.
*/
public Builder clearApplicationId() {
applicationId_ = getDefaultInstance().getApplicationId();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
* <code>optional string applicationId = 2;</code>
* @param value The bytes for applicationId to set.
* @return This builder for chaining.
*/
public Builder setApplicationIdBytes(
org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
applicationId_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.DockerCredentialTokenIdentifierProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.DockerCredentialTokenIdentifierProto)
private static final org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.DockerCredentialTokenIdentifierProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.DockerCredentialTokenIdentifierProto();
}
public static org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.DockerCredentialTokenIdentifierProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
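// DEFAULT_INSTANCE is the single shared, immutable instance of this message
// with every field at its default; it backs both getDefaultInstance() and
// getDefaultInstanceForType().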
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<DockerCredentialTokenIdentifierProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<DockerCredentialTokenIdentifierProto>() {
@java.lang.Override
public DockerCredentialTokenIdentifierProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
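// PARSER is deprecated in favor of the parser() accessor below. Its
// parsePartialFrom attaches the partially built message to any
// InvalidProtocolBufferException, so callers can inspect what was decoded
// before the failure.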
public static org.apache.hadoop.thirdparty.protobuf.Parser<DockerCredentialTokenIdentifierProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<DockerCredentialTokenIdentifierProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.DockerCredentialTokenIdentifierProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
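// A minimal round-trip sketch for the message defined above (the field
// values are hypothetical, not part of the generated API):
//
//   YarnSecurityTokenProtos.DockerCredentialTokenIdentifierProto token =
//       YarnSecurityTokenProtos.DockerCredentialTokenIdentifierProto.newBuilder()
//           .setRegistryUrl("https://registry.example.com")  // assumed value
//           .setApplicationId("application_1_0001")          // assumed value
//           .build();
//   byte[] wire = token.toByteArray();
//   YarnSecurityTokenProtos.DockerCredentialTokenIdentifierProto parsed =
//       YarnSecurityTokenProtos.DockerCredentialTokenIdentifierProto.parseFrom(wire);
//   assert parsed.getRegistryUrl().equals(token.getRegistryUrl());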
private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
internal_static_hadoop_yarn_NMTokenIdentifierProto_descriptor;
private static final
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hadoop_yarn_NMTokenIdentifierProto_fieldAccessorTable;
private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
internal_static_hadoop_yarn_AMRMTokenIdentifierProto_descriptor;
private static final
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hadoop_yarn_AMRMTokenIdentifierProto_fieldAccessorTable;
private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
internal_static_hadoop_yarn_ContainerTokenIdentifierProto_descriptor;
private static final
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hadoop_yarn_ContainerTokenIdentifierProto_fieldAccessorTable;
private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
internal_static_hadoop_yarn_ClientToAMTokenIdentifierProto_descriptor;
private static final
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hadoop_yarn_ClientToAMTokenIdentifierProto_fieldAccessorTable;
private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
internal_static_hadoop_yarn_YARNDelegationTokenIdentifierProto_descriptor;
private static final
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hadoop_yarn_YARNDelegationTokenIdentifierProto_fieldAccessorTable;
private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
internal_static_hadoop_yarn_DockerCredentialTokenIdentifierProto_descriptor;
private static final
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hadoop_yarn_DockerCredentialTokenIdentifierProto_fieldAccessorTable;
public static org.apache.hadoop.thirdparty.protobuf.Descriptors.FileDescriptor
getDescriptor() {
return descriptor;
}
private static org.apache.hadoop.thirdparty.protobuf.Descriptors.FileDescriptor
descriptor;
static {
java.lang.String[] descriptorData = {
"\n\031yarn_security_token.proto\022\013hadoop.yarn" +
"\032\021yarn_protos.proto\"\251\001\n\026NMTokenIdentifie" +
"rProto\022<\n\014appAttemptId\030\001 \001(\0132&.hadoop.ya" +
"rn.ApplicationAttemptIdProto\022(\n\006nodeId\030\002" +
" \001(\0132\030.hadoop.yarn.NodeIdProto\022\024\n\014appSub" +
"mitter\030\003 \001(\t\022\021\n\005keyId\030\004 \001(\005:\002-1\"k\n\030AMRMT" +
"okenIdentifierProto\022<\n\014appAttemptId\030\001 \001(" +
"\0132&.hadoop.yarn.ApplicationAttemptIdProt" +
"o\022\021\n\005keyId\030\002 \001(\005:\002-1\"\350\004\n\035ContainerTokenI" +
"dentifierProto\0222\n\013containerId\030\001 \001(\0132\035.ha" +
"doop.yarn.ContainerIdProto\022\022\n\nnmHostAddr" +
"\030\002 \001(\t\022\024\n\014appSubmitter\030\003 \001(\t\022,\n\010resource" +
"\030\004 \001(\0132\032.hadoop.yarn.ResourceProto\022\027\n\017ex" +
"piryTimeStamp\030\005 \001(\003\022\027\n\013masterKeyId\030\006 \001(\005" +
":\002-1\022\024\n\014rmIdentifier\030\007 \001(\003\022,\n\010priority\030\010" +
" \001(\0132\032.hadoop.yarn.PriorityProto\022\024\n\014crea" +
"tionTime\030\t \001(\003\022F\n\025logAggregationContext\030" +
"\n \001(\0132\'.hadoop.yarn.LogAggregationContex" +
"tProto\022\033\n\023nodeLabelExpression\030\013 \001(\t\0226\n\rc" +
"ontainerType\030\014 \001(\0162\037.hadoop.yarn.Contain" +
"erTypeProto\022B\n\rexecutionType\030\r \001(\0162\037.had" +
"oop.yarn.ExecutionTypeProto:\nGUARANTEED\022" +
"\022\n\007version\030\016 \001(\005:\0010\022!\n\025allocation_reques" +
"t_id\030\017 \001(\003:\002-1\022\027\n\017allocation_tags\030\020 \003(\t\"" +
"r\n\036ClientToAMTokenIdentifierProto\022<\n\014app" +
"AttemptId\030\001 \001(\0132&.hadoop.yarn.Applicatio" +
"nAttemptIdProto\022\022\n\nclientName\030\002 \001(\t\"\247\001\n\"" +
"YARNDelegationTokenIdentifierProto\022\r\n\005ow" +
"ner\030\001 \001(\t\022\017\n\007renewer\030\002 \001(\t\022\020\n\010realUser\030\003" +
" \001(\t\022\021\n\tissueDate\030\004 \001(\003\022\017\n\007maxDate\030\005 \001(\003" +
"\022\026\n\016sequenceNumber\030\006 \001(\005\022\023\n\013masterKeyId\030" +
"\007 \001(\005\"R\n$DockerCredentialTokenIdentifier" +
"Proto\022\023\n\013registryUrl\030\001 \001(\t\022\025\n\rapplicatio" +
"nId\030\002 \001(\tB=\n\034org.apache.hadoop.yarn.prot" +
"oB\027YarnSecurityTokenProtos\210\001\001\240\001\001"
};
descriptor = org.apache.hadoop.thirdparty.protobuf.Descriptors.FileDescriptor
.internalBuildGeneratedFileFrom(descriptorData,
new org.apache.hadoop.thirdparty.protobuf.Descriptors.FileDescriptor[] {
org.apache.hadoop.yarn.proto.YarnProtos.getDescriptor(),
});
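// descriptorData above is the serialized FileDescriptorProto for
// yarn_security_token.proto, embedded as an escaped string and decoded at
// class-load time; the lookups below fetch each message descriptor by its
// index (0-5) in .proto declaration order.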
internal_static_hadoop_yarn_NMTokenIdentifierProto_descriptor =
getDescriptor().getMessageTypes().get(0);
internal_static_hadoop_yarn_NMTokenIdentifierProto_fieldAccessorTable = new
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hadoop_yarn_NMTokenIdentifierProto_descriptor,
new java.lang.String[] { "AppAttemptId", "NodeId", "AppSubmitter", "KeyId", });
internal_static_hadoop_yarn_AMRMTokenIdentifierProto_descriptor =
getDescriptor().getMessageTypes().get(1);
internal_static_hadoop_yarn_AMRMTokenIdentifierProto_fieldAccessorTable = new
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hadoop_yarn_AMRMTokenIdentifierProto_descriptor,
new java.lang.String[] { "AppAttemptId", "KeyId", });
internal_static_hadoop_yarn_ContainerTokenIdentifierProto_descriptor =
getDescriptor().getMessageTypes().get(2);
internal_static_hadoop_yarn_ContainerTokenIdentifierProto_fieldAccessorTable = new
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hadoop_yarn_ContainerTokenIdentifierProto_descriptor,
new java.lang.String[] { "ContainerId", "NmHostAddr", "AppSubmitter", "Resource", "ExpiryTimeStamp", "MasterKeyId", "RmIdentifier", "Priority", "CreationTime", "LogAggregationContext", "NodeLabelExpression", "ContainerType", "ExecutionType", "Version", "AllocationRequestId", "AllocationTags", });
internal_static_hadoop_yarn_ClientToAMTokenIdentifierProto_descriptor =
getDescriptor().getMessageTypes().get(3);
internal_static_hadoop_yarn_ClientToAMTokenIdentifierProto_fieldAccessorTable = new
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hadoop_yarn_ClientToAMTokenIdentifierProto_descriptor,
new java.lang.String[] { "AppAttemptId", "ClientName", });
internal_static_hadoop_yarn_YARNDelegationTokenIdentifierProto_descriptor =
getDescriptor().getMessageTypes().get(4);
internal_static_hadoop_yarn_YARNDelegationTokenIdentifierProto_fieldAccessorTable = new
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hadoop_yarn_YARNDelegationTokenIdentifierProto_descriptor,
new java.lang.String[] { "Owner", "Renewer", "RealUser", "IssueDate", "MaxDate", "SequenceNumber", "MasterKeyId", });
internal_static_hadoop_yarn_DockerCredentialTokenIdentifierProto_descriptor =
getDescriptor().getMessageTypes().get(5);
internal_static_hadoop_yarn_DockerCredentialTokenIdentifierProto_fieldAccessorTable = new
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hadoop_yarn_DockerCredentialTokenIdentifierProto_descriptor,
new java.lang.String[] { "RegistryUrl", "ApplicationId", });
org.apache.hadoop.yarn.proto.YarnProtos.getDescriptor();
}
// @@protoc_insertion_point(outer_class_scope)
}