YarnServiceProtos.java

// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: yarn_service_protos.proto

// Protobuf Java Version: 3.25.5
package org.apache.hadoop.yarn.proto;

public final class YarnServiceProtos {
  private YarnServiceProtos() {}
  public static void registerAllExtensions(
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite registry) {
  }

  public static void registerAllExtensions(
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistry registry) {
    registerAllExtensions(
        (org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite) registry);
  }
  /**
   * Protobuf enum {@code hadoop.yarn.ContainerUpdateTypeProto}
   */
  public enum ContainerUpdateTypeProto
      implements org.apache.hadoop.thirdparty.protobuf.ProtocolMessageEnum {
    /**
     * <code>INCREASE_RESOURCE = 0;</code>
     */
    INCREASE_RESOURCE(0),
    /**
     * <code>DECREASE_RESOURCE = 1;</code>
     */
    DECREASE_RESOURCE(1),
    /**
     * <code>PROMOTE_EXECUTION_TYPE = 2;</code>
     */
    PROMOTE_EXECUTION_TYPE(2),
    /**
     * <code>DEMOTE_EXECUTION_TYPE = 3;</code>
     */
    DEMOTE_EXECUTION_TYPE(3),
    ;

    /**
     * <code>INCREASE_RESOURCE = 0;</code>
     */
    public static final int INCREASE_RESOURCE_VALUE = 0;
    /**
     * <code>DECREASE_RESOURCE = 1;</code>
     */
    public static final int DECREASE_RESOURCE_VALUE = 1;
    /**
     * <code>PROMOTE_EXECUTION_TYPE = 2;</code>
     */
    public static final int PROMOTE_EXECUTION_TYPE_VALUE = 2;
    /**
     * <code>DEMOTE_EXECUTION_TYPE = 3;</code>
     */
    public static final int DEMOTE_EXECUTION_TYPE_VALUE = 3;


    public final int getNumber() {
      return value;
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static ContainerUpdateTypeProto valueOf(int value) {
      return forNumber(value);
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     */
    public static ContainerUpdateTypeProto forNumber(int value) {
      switch (value) {
        case 0: return INCREASE_RESOURCE;
        case 1: return DECREASE_RESOURCE;
        case 2: return PROMOTE_EXECUTION_TYPE;
        case 3: return DEMOTE_EXECUTION_TYPE;
        default: return null;
      }
    }

    public static org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<ContainerUpdateTypeProto>
        internalGetValueMap() {
      return internalValueMap;
    }
    private static final org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<
        ContainerUpdateTypeProto> internalValueMap =
          new org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<ContainerUpdateTypeProto>() {
            public ContainerUpdateTypeProto findValueByNumber(int number) {
              return ContainerUpdateTypeProto.forNumber(number);
            }
          };

    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor
        getValueDescriptor() {
      return getDescriptor().getValues().get(ordinal());
    }
    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptorForType() {
      return getDescriptor();
    }
    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.getDescriptor().getEnumTypes().get(0);
    }

    private static final ContainerUpdateTypeProto[] VALUES = values();

    public static ContainerUpdateTypeProto valueOf(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "EnumValueDescriptor is not for this type.");
      }
      return VALUES[desc.getIndex()];
    }

    private final int value;

    private ContainerUpdateTypeProto(int value) {
      this.value = value;
    }

    // @@protoc_insertion_point(enum_scope:hadoop.yarn.ContainerUpdateTypeProto)
  }
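
  // Illustrative usage (not part of the generated file): converting between an
  // enum constant and its numeric wire value. forNumber(int) is the preferred
  // lookup (valueOf(int) is deprecated) and returns null for numbers not
  // defined in the enum, so callers should null-check before dereferencing.
  //
  //   ContainerUpdateTypeProto t = ContainerUpdateTypeProto.PROMOTE_EXECUTION_TYPE;
  //   int wire = t.getNumber();                                // 2
  //   ContainerUpdateTypeProto back = ContainerUpdateTypeProto.forNumber(wire);
  //   assert back == t;
  //   assert ContainerUpdateTypeProto.forNumber(99) == null;   // unknown number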

  /**
   * Protobuf enum {@code hadoop.yarn.SchedulerResourceTypes}
   */
  public enum SchedulerResourceTypes
      implements org.apache.hadoop.thirdparty.protobuf.ProtocolMessageEnum {
    /**
     * <code>MEMORY = 0;</code>
     */
    MEMORY(0),
    /**
     * <code>CPU = 1;</code>
     */
    CPU(1),
    ;

    /**
     * <code>MEMORY = 0;</code>
     */
    public static final int MEMORY_VALUE = 0;
    /**
     * <code>CPU = 1;</code>
     */
    public static final int CPU_VALUE = 1;


    public final int getNumber() {
      return value;
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static SchedulerResourceTypes valueOf(int value) {
      return forNumber(value);
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     */
    public static SchedulerResourceTypes forNumber(int value) {
      switch (value) {
        case 0: return MEMORY;
        case 1: return CPU;
        default: return null;
      }
    }

    public static org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<SchedulerResourceTypes>
        internalGetValueMap() {
      return internalValueMap;
    }
    private static final org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<
        SchedulerResourceTypes> internalValueMap =
          new org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<SchedulerResourceTypes>() {
            public SchedulerResourceTypes findValueByNumber(int number) {
              return SchedulerResourceTypes.forNumber(number);
            }
          };

    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor
        getValueDescriptor() {
      return getDescriptor().getValues().get(ordinal());
    }
    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptorForType() {
      return getDescriptor();
    }
    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.getDescriptor().getEnumTypes().get(1);
    }

    private static final SchedulerResourceTypes[] VALUES = values();

    public static SchedulerResourceTypes valueOf(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "EnumValueDescriptor is not for this type.");
      }
      return VALUES[desc.getIndex()];
    }

    private final int value;

    private SchedulerResourceTypes(int value) {
      this.value = value;
    }

    // @@protoc_insertion_point(enum_scope:hadoop.yarn.SchedulerResourceTypes)
  }
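
  // Sketch (not part of the generated file): the reflection-based pair
  // getValueDescriptor()/valueOf(EnumValueDescriptor) mirrors
  // getNumber()/forNumber(int) but goes through protobuf descriptors. As the
  // valueOf above shows, passing a descriptor belonging to a different enum
  // type throws IllegalArgumentException.
  //
  //   SchedulerResourceTypes cpu = SchedulerResourceTypes.CPU;
  //   org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor d =
  //       cpu.getValueDescriptor();
  //   assert SchedulerResourceTypes.valueOf(d) == cpu;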

  /**
   * Protobuf enum {@code hadoop.yarn.ApplicationsRequestScopeProto}
   */
  public enum ApplicationsRequestScopeProto
      implements org.apache.hadoop.thirdparty.protobuf.ProtocolMessageEnum {
    /**
     * <code>ALL = 0;</code>
     */
    ALL(0),
    /**
     * <code>VIEWABLE = 1;</code>
     */
    VIEWABLE(1),
    /**
     * <code>OWN = 2;</code>
     */
    OWN(2),
    ;

    /**
     * <code>ALL = 0;</code>
     */
    public static final int ALL_VALUE = 0;
    /**
     * <code>VIEWABLE = 1;</code>
     */
    public static final int VIEWABLE_VALUE = 1;
    /**
     * <code>OWN = 2;</code>
     */
    public static final int OWN_VALUE = 2;


    public final int getNumber() {
      return value;
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static ApplicationsRequestScopeProto valueOf(int value) {
      return forNumber(value);
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     */
    public static ApplicationsRequestScopeProto forNumber(int value) {
      switch (value) {
        case 0: return ALL;
        case 1: return VIEWABLE;
        case 2: return OWN;
        default: return null;
      }
    }

    public static org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<ApplicationsRequestScopeProto>
        internalGetValueMap() {
      return internalValueMap;
    }
    private static final org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<
        ApplicationsRequestScopeProto> internalValueMap =
          new org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<ApplicationsRequestScopeProto>() {
            public ApplicationsRequestScopeProto findValueByNumber(int number) {
              return ApplicationsRequestScopeProto.forNumber(number);
            }
          };

    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor
        getValueDescriptor() {
      return getDescriptor().getValues().get(ordinal());
    }
    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptorForType() {
      return getDescriptor();
    }
    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.getDescriptor().getEnumTypes().get(2);
    }

    private static final ApplicationsRequestScopeProto[] VALUES = values();

    public static ApplicationsRequestScopeProto valueOf(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "EnumValueDescriptor is not for this type.");
      }
      return VALUES[desc.getIndex()];
    }

    private final int value;

    private ApplicationsRequestScopeProto(int value) {
      this.value = value;
    }

    // @@protoc_insertion_point(enum_scope:hadoop.yarn.ApplicationsRequestScopeProto)
  }

  /**
   * Protobuf enum {@code hadoop.yarn.LocalizationStateProto}
   */
  public enum LocalizationStateProto
      implements org.apache.hadoop.thirdparty.protobuf.ProtocolMessageEnum {
    /**
     * <code>L_PENDING = 1;</code>
     */
    L_PENDING(1),
    /**
     * <code>L_COMPLETED = 2;</code>
     */
    L_COMPLETED(2),
    /**
     * <code>L_FAILED = 3;</code>
     */
    L_FAILED(3),
    ;

    /**
     * <code>L_PENDING = 1;</code>
     */
    public static final int L_PENDING_VALUE = 1;
    /**
     * <code>L_COMPLETED = 2;</code>
     */
    public static final int L_COMPLETED_VALUE = 2;
    /**
     * <code>L_FAILED = 3;</code>
     */
    public static final int L_FAILED_VALUE = 3;


    public final int getNumber() {
      return value;
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static LocalizationStateProto valueOf(int value) {
      return forNumber(value);
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     */
    public static LocalizationStateProto forNumber(int value) {
      switch (value) {
        case 1: return L_PENDING;
        case 2: return L_COMPLETED;
        case 3: return L_FAILED;
        default: return null;
      }
    }

    public static org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<LocalizationStateProto>
        internalGetValueMap() {
      return internalValueMap;
    }
    private static final org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<
        LocalizationStateProto> internalValueMap =
          new org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<LocalizationStateProto>() {
            public LocalizationStateProto findValueByNumber(int number) {
              return LocalizationStateProto.forNumber(number);
            }
          };

    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor
        getValueDescriptor() {
      return getDescriptor().getValues().get(ordinal());
    }
    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptorForType() {
      return getDescriptor();
    }
    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.getDescriptor().getEnumTypes().get(3);
    }

    private static final LocalizationStateProto[] VALUES = values();

    public static LocalizationStateProto valueOf(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "EnumValueDescriptor is not for this type.");
      }
      return VALUES[desc.getIndex()];
    }

    private final int value;

    private LocalizationStateProto(int value) {
      this.value = value;
    }

    // @@protoc_insertion_point(enum_scope:hadoop.yarn.LocalizationStateProto)
  }
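
  // Editorial note with a small check (not generated): unlike the other enums
  // in this file, LocalizationStateProto numbers its values from 1, so 0 is
  // not a valid wire value here.
  //
  //   assert LocalizationStateProto.forNumber(1) == LocalizationStateProto.L_PENDING;
  //   assert LocalizationStateProto.forNumber(0) == null;   // no value 0 defined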

  public interface RegisterApplicationMasterRequestProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.RegisterApplicationMasterRequestProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>optional string host = 1;</code>
     * @return Whether the host field is set.
     */
    boolean hasHost();
    /**
     * <code>optional string host = 1;</code>
     * @return The host.
     */
    java.lang.String getHost();
    /**
     * <code>optional string host = 1;</code>
     * @return The bytes for host.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getHostBytes();

    /**
     * <code>optional int32 rpc_port = 2;</code>
     * @return Whether the rpcPort field is set.
     */
    boolean hasRpcPort();
    /**
     * <code>optional int32 rpc_port = 2;</code>
     * @return The rpcPort.
     */
    int getRpcPort();

    /**
     * <code>optional string tracking_url = 3;</code>
     * @return Whether the trackingUrl field is set.
     */
    boolean hasTrackingUrl();
    /**
     * <code>optional string tracking_url = 3;</code>
     * @return The trackingUrl.
     */
    java.lang.String getTrackingUrl();
    /**
     * <code>optional string tracking_url = 3;</code>
     * @return The bytes for trackingUrl.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getTrackingUrlBytes();

    /**
     * <code>repeated .hadoop.yarn.PlacementConstraintMapEntryProto placement_constraints = 4;</code>
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto> 
        getPlacementConstraintsList();
    /**
     * <code>repeated .hadoop.yarn.PlacementConstraintMapEntryProto placement_constraints = 4;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto getPlacementConstraints(int index);
    /**
     * <code>repeated .hadoop.yarn.PlacementConstraintMapEntryProto placement_constraints = 4;</code>
     */
    int getPlacementConstraintsCount();
    /**
     * <code>repeated .hadoop.yarn.PlacementConstraintMapEntryProto placement_constraints = 4;</code>
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProtoOrBuilder> 
        getPlacementConstraintsOrBuilderList();
    /**
     * <code>repeated .hadoop.yarn.PlacementConstraintMapEntryProto placement_constraints = 4;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProtoOrBuilder getPlacementConstraintsOrBuilder(
        int index);
  }
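
  // Sketch (not part of the generated file): the message class below and its
  // nested Builder both implement this OrBuilder interface, so read-only code
  // can accept either form without forcing a build(). The helper name
  // describeAm is hypothetical.
  //
  //   static String describeAm(RegisterApplicationMasterRequestProtoOrBuilder r) {
  //     return (r.hasHost() ? r.getHost() : "<unset>") + ":" + r.getRpcPort();
  //   }
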
  /**
   * <pre>
   *&#47;///////////////////////////////////////////////////
   * ///// AM_RM_Protocol ///////////////////////////////
   * ////////////////////////////////////////////////////
   * </pre>
   *
   * Protobuf type {@code hadoop.yarn.RegisterApplicationMasterRequestProto}
   */
  public static final class RegisterApplicationMasterRequestProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.RegisterApplicationMasterRequestProto)
      RegisterApplicationMasterRequestProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use RegisterApplicationMasterRequestProto.newBuilder() to construct.
    private RegisterApplicationMasterRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private RegisterApplicationMasterRequestProto() {
      host_ = "";
      trackingUrl_ = "";
      placementConstraints_ = java.util.Collections.emptyList();
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new RegisterApplicationMasterRequestProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_RegisterApplicationMasterRequestProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_RegisterApplicationMasterRequestProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterRequestProto.Builder.class);
    }

    private int bitField0_;
    public static final int HOST_FIELD_NUMBER = 1;
    @SuppressWarnings("serial")
    private volatile java.lang.Object host_ = "";
    /**
     * <code>optional string host = 1;</code>
     * @return Whether the host field is set.
     */
    @java.lang.Override
    public boolean hasHost() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional string host = 1;</code>
     * @return The host.
     */
    @java.lang.Override
    public java.lang.String getHost() {
      java.lang.Object ref = host_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          host_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string host = 1;</code>
     * @return The bytes for host.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getHostBytes() {
      java.lang.Object ref = host_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        host_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int RPC_PORT_FIELD_NUMBER = 2;
    private int rpcPort_ = 0;
    /**
     * <code>optional int32 rpc_port = 2;</code>
     * @return Whether the rpcPort field is set.
     */
    @java.lang.Override
    public boolean hasRpcPort() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <code>optional int32 rpc_port = 2;</code>
     * @return The rpcPort.
     */
    @java.lang.Override
    public int getRpcPort() {
      return rpcPort_;
    }

    public static final int TRACKING_URL_FIELD_NUMBER = 3;
    @SuppressWarnings("serial")
    private volatile java.lang.Object trackingUrl_ = "";
    /**
     * <code>optional string tracking_url = 3;</code>
     * @return Whether the trackingUrl field is set.
     */
    @java.lang.Override
    public boolean hasTrackingUrl() {
      return ((bitField0_ & 0x00000004) != 0);
    }
    /**
     * <code>optional string tracking_url = 3;</code>
     * @return The trackingUrl.
     */
    @java.lang.Override
    public java.lang.String getTrackingUrl() {
      java.lang.Object ref = trackingUrl_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          trackingUrl_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string tracking_url = 3;</code>
     * @return The bytes for trackingUrl.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getTrackingUrlBytes() {
      java.lang.Object ref = trackingUrl_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        trackingUrl_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int PLACEMENT_CONSTRAINTS_FIELD_NUMBER = 4;
    @SuppressWarnings("serial")
    private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto> placementConstraints_;
    /**
     * <code>repeated .hadoop.yarn.PlacementConstraintMapEntryProto placement_constraints = 4;</code>
     */
    @java.lang.Override
    public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto> getPlacementConstraintsList() {
      return placementConstraints_;
    }
    /**
     * <code>repeated .hadoop.yarn.PlacementConstraintMapEntryProto placement_constraints = 4;</code>
     */
    @java.lang.Override
    public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProtoOrBuilder> 
        getPlacementConstraintsOrBuilderList() {
      return placementConstraints_;
    }
    /**
     * <code>repeated .hadoop.yarn.PlacementConstraintMapEntryProto placement_constraints = 4;</code>
     */
    @java.lang.Override
    public int getPlacementConstraintsCount() {
      return placementConstraints_.size();
    }
    /**
     * <code>repeated .hadoop.yarn.PlacementConstraintMapEntryProto placement_constraints = 4;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto getPlacementConstraints(int index) {
      return placementConstraints_.get(index);
    }
    /**
     * <code>repeated .hadoop.yarn.PlacementConstraintMapEntryProto placement_constraints = 4;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProtoOrBuilder getPlacementConstraintsOrBuilder(
        int index) {
      return placementConstraints_.get(index);
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      for (int i = 0; i < getPlacementConstraintsCount(); i++) {
        if (!getPlacementConstraints(i).isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, host_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        output.writeInt32(2, rpcPort_);
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 3, trackingUrl_);
      }
      for (int i = 0; i < placementConstraints_.size(); i++) {
        output.writeMessage(4, placementConstraints_.get(i));
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, host_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt32Size(2, rpcPort_);
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(3, trackingUrl_);
      }
      for (int i = 0; i < placementConstraints_.size(); i++) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(4, placementConstraints_.get(i));
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterRequestProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterRequestProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterRequestProto) obj;

      if (hasHost() != other.hasHost()) return false;
      if (hasHost()) {
        if (!getHost()
            .equals(other.getHost())) return false;
      }
      if (hasRpcPort() != other.hasRpcPort()) return false;
      if (hasRpcPort()) {
        if (getRpcPort()
            != other.getRpcPort()) return false;
      }
      if (hasTrackingUrl() != other.hasTrackingUrl()) return false;
      if (hasTrackingUrl()) {
        if (!getTrackingUrl()
            .equals(other.getTrackingUrl())) return false;
      }
      if (!getPlacementConstraintsList()
          .equals(other.getPlacementConstraintsList())) return false;
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasHost()) {
        hash = (37 * hash) + HOST_FIELD_NUMBER;
        hash = (53 * hash) + getHost().hashCode();
      }
      if (hasRpcPort()) {
        hash = (37 * hash) + RPC_PORT_FIELD_NUMBER;
        hash = (53 * hash) + getRpcPort();
      }
      if (hasTrackingUrl()) {
        hash = (37 * hash) + TRACKING_URL_FIELD_NUMBER;
        hash = (53 * hash) + getTrackingUrl().hashCode();
      }
      if (getPlacementConstraintsCount() > 0) {
        hash = (37 * hash) + PLACEMENT_CONSTRAINTS_FIELD_NUMBER;
        hash = (53 * hash) + getPlacementConstraintsList().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterRequestProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterRequestProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterRequestProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterRequestProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterRequestProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterRequestProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterRequestProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterRequestProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }
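
    // Illustrative round trip (not part of the generated file): build a
    // request with the nested Builder, serialize it, and parse it back. The
    // host, port, and URL values are hypothetical; parseFrom throws
    // InvalidProtocolBufferException on malformed input.
    //
    //   RegisterApplicationMasterRequestProto req =
    //       RegisterApplicationMasterRequestProto.newBuilder()
    //           .setHost("am-host.example.com")
    //           .setRpcPort(8030)
    //           .setTrackingUrl("http://am-host.example.com:8088")
    //           .build();
    //   byte[] bytes = req.toByteArray();
    //   RegisterApplicationMasterRequestProto parsed =
    //       RegisterApplicationMasterRequestProto.parseFrom(bytes);
    //   assert parsed.equals(req);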

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterRequestProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * <pre>
     *&#47;///////////////////////////////////////////////////
     * ///// AM_RM_Protocol ///////////////////////////////
     * ////////////////////////////////////////////////////
     * </pre>
     *
     * Protobuf type {@code hadoop.yarn.RegisterApplicationMasterRequestProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.RegisterApplicationMasterRequestProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterRequestProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_RegisterApplicationMasterRequestProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_RegisterApplicationMasterRequestProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterRequestProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterRequestProto.newBuilder()
      private Builder() {

      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);

      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        host_ = "";
        rpcPort_ = 0;
        trackingUrl_ = "";
        if (placementConstraintsBuilder_ == null) {
          placementConstraints_ = java.util.Collections.emptyList();
        } else {
          placementConstraints_ = null;
          placementConstraintsBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000008);
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_RegisterApplicationMasterRequestProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterRequestProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterRequestProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterRequestProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterRequestProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterRequestProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterRequestProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterRequestProto(this);
        buildPartialRepeatedFields(result);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartialRepeatedFields(org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterRequestProto result) {
        if (placementConstraintsBuilder_ == null) {
          if (((bitField0_ & 0x00000008) != 0)) {
            placementConstraints_ = java.util.Collections.unmodifiableList(placementConstraints_);
            bitField0_ = (bitField0_ & ~0x00000008);
          }
          result.placementConstraints_ = placementConstraints_;
        } else {
          result.placementConstraints_ = placementConstraintsBuilder_.build();
        }
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterRequestProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.host_ = host_;
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.rpcPort_ = rpcPort_;
          to_bitField0_ |= 0x00000002;
        }
        if (((from_bitField0_ & 0x00000004) != 0)) {
          result.trackingUrl_ = trackingUrl_;
          to_bitField0_ |= 0x00000004;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterRequestProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterRequestProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterRequestProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterRequestProto.getDefaultInstance()) return this;
        if (other.hasHost()) {
          host_ = other.host_;
          bitField0_ |= 0x00000001;
          onChanged();
        }
        if (other.hasRpcPort()) {
          setRpcPort(other.getRpcPort());
        }
        if (other.hasTrackingUrl()) {
          trackingUrl_ = other.trackingUrl_;
          bitField0_ |= 0x00000004;
          onChanged();
        }
        if (placementConstraintsBuilder_ == null) {
          if (!other.placementConstraints_.isEmpty()) {
            if (placementConstraints_.isEmpty()) {
              placementConstraints_ = other.placementConstraints_;
              bitField0_ = (bitField0_ & ~0x00000008);
            } else {
              ensurePlacementConstraintsIsMutable();
              placementConstraints_.addAll(other.placementConstraints_);
            }
            onChanged();
          }
        } else {
          if (!other.placementConstraints_.isEmpty()) {
            if (placementConstraintsBuilder_.isEmpty()) {
              placementConstraintsBuilder_.dispose();
              placementConstraintsBuilder_ = null;
              placementConstraints_ = other.placementConstraints_;
              bitField0_ = (bitField0_ & ~0x00000008);
              placementConstraintsBuilder_ = 
                org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                   getPlacementConstraintsFieldBuilder() : null;
            } else {
              placementConstraintsBuilder_.addAllMessages(other.placementConstraints_);
            }
          }
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        for (int i = 0; i < getPlacementConstraintsCount(); i++) {
          if (!getPlacementConstraints(i).isInitialized()) {
            return false;
          }
        }
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                host_ = input.readBytes();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
              case 16: {
                rpcPort_ = input.readInt32();
                bitField0_ |= 0x00000002;
                break;
              } // case 16
              case 26: {
                trackingUrl_ = input.readBytes();
                bitField0_ |= 0x00000004;
                break;
              } // case 26
              case 34: {
                org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto m =
                    input.readMessage(
                        org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto.PARSER,
                        extensionRegistry);
                if (placementConstraintsBuilder_ == null) {
                  ensurePlacementConstraintsIsMutable();
                  placementConstraints_.add(m);
                } else {
                  placementConstraintsBuilder_.addMessage(m);
                }
                break;
              } // case 34
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

      private java.lang.Object host_ = "";
      /**
       * <code>optional string host = 1;</code>
       * @return Whether the host field is set.
       */
      public boolean hasHost() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>optional string host = 1;</code>
       * @return The host.
       */
      public java.lang.String getHost() {
        java.lang.Object ref = host_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            host_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string host = 1;</code>
       * @return The bytes for host.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getHostBytes() {
        java.lang.Object ref = host_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          host_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string host = 1;</code>
       * @param value The host to set.
       * @return This builder for chaining.
       */
      public Builder setHost(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        host_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional string host = 1;</code>
       * @return This builder for chaining.
       */
      public Builder clearHost() {
        host_ = getDefaultInstance().getHost();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
        return this;
      }
      /**
       * <code>optional string host = 1;</code>
       * @param value The bytes for host to set.
       * @return This builder for chaining.
       */
      public Builder setHostBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        host_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }

      private int rpcPort_ ;
      /**
       * <code>optional int32 rpc_port = 2;</code>
       * @return Whether the rpcPort field is set.
       */
      @java.lang.Override
      public boolean hasRpcPort() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * <code>optional int32 rpc_port = 2;</code>
       * @return The rpcPort.
       */
      @java.lang.Override
      public int getRpcPort() {
        return rpcPort_;
      }
      /**
       * <code>optional int32 rpc_port = 2;</code>
       * @param value The rpcPort to set.
       * @return This builder for chaining.
       */
      public Builder setRpcPort(int value) {

        rpcPort_ = value;
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>optional int32 rpc_port = 2;</code>
       * @return This builder for chaining.
       */
      public Builder clearRpcPort() {
        bitField0_ = (bitField0_ & ~0x00000002);
        rpcPort_ = 0;
        onChanged();
        return this;
      }

      private java.lang.Object trackingUrl_ = "";
      /**
       * <code>optional string tracking_url = 3;</code>
       * @return Whether the trackingUrl field is set.
       */
      public boolean hasTrackingUrl() {
        return ((bitField0_ & 0x00000004) != 0);
      }
      /**
       * <code>optional string tracking_url = 3;</code>
       * @return The trackingUrl.
       */
      public java.lang.String getTrackingUrl() {
        java.lang.Object ref = trackingUrl_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            trackingUrl_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string tracking_url = 3;</code>
       * @return The bytes for trackingUrl.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getTrackingUrlBytes() {
        java.lang.Object ref = trackingUrl_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          trackingUrl_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string tracking_url = 3;</code>
       * @param value The trackingUrl to set.
       * @return This builder for chaining.
       */
      public Builder setTrackingUrl(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        trackingUrl_ = value;
        bitField0_ |= 0x00000004;
        onChanged();
        return this;
      }
      /**
       * <code>optional string tracking_url = 3;</code>
       * @return This builder for chaining.
       */
      public Builder clearTrackingUrl() {
        trackingUrl_ = getDefaultInstance().getTrackingUrl();
        bitField0_ = (bitField0_ & ~0x00000004);
        onChanged();
        return this;
      }
      /**
       * <code>optional string tracking_url = 3;</code>
       * @param value The bytes for trackingUrl to set.
       * @return This builder for chaining.
       */
      public Builder setTrackingUrlBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        trackingUrl_ = value;
        bitField0_ |= 0x00000004;
        onChanged();
        return this;
      }

      private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto> placementConstraints_ =
        java.util.Collections.emptyList();
      private void ensurePlacementConstraintsIsMutable() {
        if (!((bitField0_ & 0x00000008) != 0)) {
          placementConstraints_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto>(placementConstraints_);
          bitField0_ |= 0x00000008;
        }
      }

      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProtoOrBuilder> placementConstraintsBuilder_;

      /**
       * <code>repeated .hadoop.yarn.PlacementConstraintMapEntryProto placement_constraints = 4;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto> getPlacementConstraintsList() {
        if (placementConstraintsBuilder_ == null) {
          return java.util.Collections.unmodifiableList(placementConstraints_);
        } else {
          return placementConstraintsBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.PlacementConstraintMapEntryProto placement_constraints = 4;</code>
       */
      public int getPlacementConstraintsCount() {
        if (placementConstraintsBuilder_ == null) {
          return placementConstraints_.size();
        } else {
          return placementConstraintsBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.PlacementConstraintMapEntryProto placement_constraints = 4;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto getPlacementConstraints(int index) {
        if (placementConstraintsBuilder_ == null) {
          return placementConstraints_.get(index);
        } else {
          return placementConstraintsBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.PlacementConstraintMapEntryProto placement_constraints = 4;</code>
       */
      public Builder setPlacementConstraints(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto value) {
        if (placementConstraintsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensurePlacementConstraintsIsMutable();
          placementConstraints_.set(index, value);
          onChanged();
        } else {
          placementConstraintsBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.PlacementConstraintMapEntryProto placement_constraints = 4;</code>
       */
      public Builder setPlacementConstraints(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto.Builder builderForValue) {
        if (placementConstraintsBuilder_ == null) {
          ensurePlacementConstraintsIsMutable();
          placementConstraints_.set(index, builderForValue.build());
          onChanged();
        } else {
          placementConstraintsBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.PlacementConstraintMapEntryProto placement_constraints = 4;</code>
       */
      public Builder addPlacementConstraints(org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto value) {
        if (placementConstraintsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensurePlacementConstraintsIsMutable();
          placementConstraints_.add(value);
          onChanged();
        } else {
          placementConstraintsBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.PlacementConstraintMapEntryProto placement_constraints = 4;</code>
       */
      public Builder addPlacementConstraints(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto value) {
        if (placementConstraintsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensurePlacementConstraintsIsMutable();
          placementConstraints_.add(index, value);
          onChanged();
        } else {
          placementConstraintsBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.PlacementConstraintMapEntryProto placement_constraints = 4;</code>
       */
      public Builder addPlacementConstraints(
          org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto.Builder builderForValue) {
        if (placementConstraintsBuilder_ == null) {
          ensurePlacementConstraintsIsMutable();
          placementConstraints_.add(builderForValue.build());
          onChanged();
        } else {
          placementConstraintsBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.PlacementConstraintMapEntryProto placement_constraints = 4;</code>
       */
      public Builder addPlacementConstraints(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto.Builder builderForValue) {
        if (placementConstraintsBuilder_ == null) {
          ensurePlacementConstraintsIsMutable();
          placementConstraints_.add(index, builderForValue.build());
          onChanged();
        } else {
          placementConstraintsBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.PlacementConstraintMapEntryProto placement_constraints = 4;</code>
       */
      public Builder addAllPlacementConstraints(
          java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto> values) {
        if (placementConstraintsBuilder_ == null) {
          ensurePlacementConstraintsIsMutable();
          org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
              values, placementConstraints_);
          onChanged();
        } else {
          placementConstraintsBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.PlacementConstraintMapEntryProto placement_constraints = 4;</code>
       */
      public Builder clearPlacementConstraints() {
        if (placementConstraintsBuilder_ == null) {
          placementConstraints_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000008);
          onChanged();
        } else {
          placementConstraintsBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.PlacementConstraintMapEntryProto placement_constraints = 4;</code>
       */
      public Builder removePlacementConstraints(int index) {
        if (placementConstraintsBuilder_ == null) {
          ensurePlacementConstraintsIsMutable();
          placementConstraints_.remove(index);
          onChanged();
        } else {
          placementConstraintsBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.PlacementConstraintMapEntryProto placement_constraints = 4;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto.Builder getPlacementConstraintsBuilder(
          int index) {
        return getPlacementConstraintsFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .hadoop.yarn.PlacementConstraintMapEntryProto placement_constraints = 4;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProtoOrBuilder getPlacementConstraintsOrBuilder(
          int index) {
        if (placementConstraintsBuilder_ == null) {
          return placementConstraints_.get(index);
        } else {
          return placementConstraintsBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.PlacementConstraintMapEntryProto placement_constraints = 4;</code>
       */
      public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProtoOrBuilder> 
           getPlacementConstraintsOrBuilderList() {
        if (placementConstraintsBuilder_ != null) {
          return placementConstraintsBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(placementConstraints_);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.PlacementConstraintMapEntryProto placement_constraints = 4;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto.Builder addPlacementConstraintsBuilder() {
        return getPlacementConstraintsFieldBuilder().addBuilder(
            org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.PlacementConstraintMapEntryProto placement_constraints = 4;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto.Builder addPlacementConstraintsBuilder(
          int index) {
        return getPlacementConstraintsFieldBuilder().addBuilder(
            index, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.PlacementConstraintMapEntryProto placement_constraints = 4;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto.Builder> 
           getPlacementConstraintsBuilderList() {
        return getPlacementConstraintsFieldBuilder().getBuilderList();
      }
      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProtoOrBuilder> 
          getPlacementConstraintsFieldBuilder() {
        if (placementConstraintsBuilder_ == null) {
          placementConstraintsBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProtoOrBuilder>(
                  placementConstraints_,
                  ((bitField0_ & 0x00000008) != 0),
                  getParentForChildren(),
                  isClean());
          placementConstraints_ = null;
        }
        return placementConstraintsBuilder_;
      }
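      // The mutators above all branch on (placementConstraintsBuilder_ == null):
      // until getPlacementConstraintsFieldBuilder() is first called the field
      // lives in the plain placementConstraints_ list; afterwards ownership
      // moves to the RepeatedFieldBuilderV3 and the list reference is nulled.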
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.RegisterApplicationMasterRequestProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.RegisterApplicationMasterRequestProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterRequestProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterRequestProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterRequestProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<RegisterApplicationMasterRequestProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<RegisterApplicationMasterRequestProto>() {
      @java.lang.Override
      public RegisterApplicationMasterRequestProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<RegisterApplicationMasterRequestProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<RegisterApplicationMasterRequestProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterRequestProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
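
  // Illustrative sketch (not part of the generated output): assembling a
  // request through the builder API above. The entry value is a placeholder
  // default instance, and buildPartial() is used so the sketch does not
  // depend on which other fields the message treats as required.
  //
  //   YarnServiceProtos.RegisterApplicationMasterRequestProto request =
  //       YarnServiceProtos.RegisterApplicationMasterRequestProto.newBuilder()
  //           .addPlacementConstraints(
  //               org.apache.hadoop.yarn.proto.YarnProtos
  //                   .PlacementConstraintMapEntryProto.getDefaultInstance())
  //           .buildPartial();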

  public interface RegisterApplicationMasterResponseProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.RegisterApplicationMasterResponseProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>optional .hadoop.yarn.ResourceProto maximumCapability = 1;</code>
     * @return Whether the maximumCapability field is set.
     */
    boolean hasMaximumCapability();
    /**
     * <code>optional .hadoop.yarn.ResourceProto maximumCapability = 1;</code>
     * @return The maximumCapability.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getMaximumCapability();
    /**
     * <code>optional .hadoop.yarn.ResourceProto maximumCapability = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getMaximumCapabilityOrBuilder();

    /**
     * <code>optional bytes client_to_am_token_master_key = 2;</code>
     * @return Whether the clientToAmTokenMasterKey field is set.
     */
    boolean hasClientToAmTokenMasterKey();
    /**
     * <code>optional bytes client_to_am_token_master_key = 2;</code>
     * @return The clientToAmTokenMasterKey.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString getClientToAmTokenMasterKey();

    /**
     * <code>repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 3;</code>
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto> 
        getApplicationACLsList();
    /**
     * <code>repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 3;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto getApplicationACLs(int index);
    /**
     * <code>repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 3;</code>
     */
    int getApplicationACLsCount();
    /**
     * <code>repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 3;</code>
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProtoOrBuilder> 
        getApplicationACLsOrBuilderList();
    /**
     * <code>repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 3;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProtoOrBuilder getApplicationACLsOrBuilder(
        int index);

    /**
     * <code>repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 4;</code>
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto> 
        getContainersFromPreviousAttemptsList();
    /**
     * <code>repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 4;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto getContainersFromPreviousAttempts(int index);
    /**
     * <code>repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 4;</code>
     */
    int getContainersFromPreviousAttemptsCount();
    /**
     * <code>repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 4;</code>
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ContainerProtoOrBuilder> 
        getContainersFromPreviousAttemptsOrBuilderList();
    /**
     * <code>repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 4;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerProtoOrBuilder getContainersFromPreviousAttemptsOrBuilder(
        int index);

    /**
     * <code>optional string queue = 5;</code>
     * @return Whether the queue field is set.
     */
    boolean hasQueue();
    /**
     * <code>optional string queue = 5;</code>
     * @return The queue.
     */
    java.lang.String getQueue();
    /**
     * <code>optional string queue = 5;</code>
     * @return The bytes for queue.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getQueueBytes();

    /**
     * <code>repeated .hadoop.yarn.NMTokenProto nm_tokens_from_previous_attempts = 6;</code>
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto> 
        getNmTokensFromPreviousAttemptsList();
    /**
     * <code>repeated .hadoop.yarn.NMTokenProto nm_tokens_from_previous_attempts = 6;</code>
     */
    org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto getNmTokensFromPreviousAttempts(int index);
    /**
     * <code>repeated .hadoop.yarn.NMTokenProto nm_tokens_from_previous_attempts = 6;</code>
     */
    int getNmTokensFromPreviousAttemptsCount();
    /**
     * <code>repeated .hadoop.yarn.NMTokenProto nm_tokens_from_previous_attempts = 6;</code>
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProtoOrBuilder> 
        getNmTokensFromPreviousAttemptsOrBuilderList();
    /**
     * <code>repeated .hadoop.yarn.NMTokenProto nm_tokens_from_previous_attempts = 6;</code>
     */
    org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProtoOrBuilder getNmTokensFromPreviousAttemptsOrBuilder(
        int index);

    /**
     * <code>repeated .hadoop.yarn.SchedulerResourceTypes scheduler_resource_types = 7;</code>
     * @return A list containing the schedulerResourceTypes.
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnServiceProtos.SchedulerResourceTypes> getSchedulerResourceTypesList();
    /**
     * <code>repeated .hadoop.yarn.SchedulerResourceTypes scheduler_resource_types = 7;</code>
     * @return The count of schedulerResourceTypes.
     */
    int getSchedulerResourceTypesCount();
    /**
     * <code>repeated .hadoop.yarn.SchedulerResourceTypes scheduler_resource_types = 7;</code>
     * @param index The index of the element to return.
     * @return The schedulerResourceTypes at the given index.
     */
    org.apache.hadoop.yarn.proto.YarnServiceProtos.SchedulerResourceTypes getSchedulerResourceTypes(int index);

    /**
     * <code>optional .hadoop.yarn.ResourceProfilesProto resource_profiles = 8;</code>
     * @return Whether the resourceProfiles field is set.
     */
    boolean hasResourceProfiles();
    /**
     * <code>optional .hadoop.yarn.ResourceProfilesProto resource_profiles = 8;</code>
     * @return The resourceProfiles.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto getResourceProfiles();
    /**
     * <code>optional .hadoop.yarn.ResourceProfilesProto resource_profiles = 8;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProtoOrBuilder getResourceProfilesOrBuilder();

    /**
     * <code>repeated .hadoop.yarn.ResourceTypeInfoProto resource_types = 9;</code>
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto> 
        getResourceTypesList();
    /**
     * <code>repeated .hadoop.yarn.ResourceTypeInfoProto resource_types = 9;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto getResourceTypes(int index);
    /**
     * <code>repeated .hadoop.yarn.ResourceTypeInfoProto resource_types = 9;</code>
     */
    int getResourceTypesCount();
    /**
     * <code>repeated .hadoop.yarn.ResourceTypeInfoProto resource_types = 9;</code>
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProtoOrBuilder> 
        getResourceTypesOrBuilderList();
    /**
     * <code>repeated .hadoop.yarn.ResourceTypeInfoProto resource_types = 9;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProtoOrBuilder getResourceTypesOrBuilder(
        int index);
  }
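
  // Sketch (not generated): consuming code typically guards optional
  // sub-messages with the hasX() accessors declared above, since getX()
  // returns the default instance rather than null when the field is unset.
  //
  //   if (response.hasMaximumCapability()) {
  //     long mem = response.getMaximumCapability().getMemory();  // getMemory() assumed
  //   }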
  /**
   * Protobuf type {@code hadoop.yarn.RegisterApplicationMasterResponseProto}
   */
  public static final class RegisterApplicationMasterResponseProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.RegisterApplicationMasterResponseProto)
      RegisterApplicationMasterResponseProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use RegisterApplicationMasterResponseProto.newBuilder() to construct.
    private RegisterApplicationMasterResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private RegisterApplicationMasterResponseProto() {
      clientToAmTokenMasterKey_ = org.apache.hadoop.thirdparty.protobuf.ByteString.EMPTY;
      applicationACLs_ = java.util.Collections.emptyList();
      containersFromPreviousAttempts_ = java.util.Collections.emptyList();
      queue_ = "";
      nmTokensFromPreviousAttempts_ = java.util.Collections.emptyList();
      schedulerResourceTypes_ = java.util.Collections.emptyList();
      resourceTypes_ = java.util.Collections.emptyList();
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new RegisterApplicationMasterResponseProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_RegisterApplicationMasterResponseProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_RegisterApplicationMasterResponseProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto.Builder.class);
    }

    private int bitField0_;
    public static final int MAXIMUMCAPABILITY_FIELD_NUMBER = 1;
    private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto maximumCapability_;
    /**
     * <code>optional .hadoop.yarn.ResourceProto maximumCapability = 1;</code>
     * @return Whether the maximumCapability field is set.
     */
    @java.lang.Override
    public boolean hasMaximumCapability() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.ResourceProto maximumCapability = 1;</code>
     * @return The maximumCapability.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getMaximumCapability() {
      return maximumCapability_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : maximumCapability_;
    }
    /**
     * <code>optional .hadoop.yarn.ResourceProto maximumCapability = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getMaximumCapabilityOrBuilder() {
      return maximumCapability_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : maximumCapability_;
    }

    public static final int CLIENT_TO_AM_TOKEN_MASTER_KEY_FIELD_NUMBER = 2;
    private org.apache.hadoop.thirdparty.protobuf.ByteString clientToAmTokenMasterKey_ = org.apache.hadoop.thirdparty.protobuf.ByteString.EMPTY;
    /**
     * <code>optional bytes client_to_am_token_master_key = 2;</code>
     * @return Whether the clientToAmTokenMasterKey field is set.
     */
    @java.lang.Override
    public boolean hasClientToAmTokenMasterKey() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <code>optional bytes client_to_am_token_master_key = 2;</code>
     * @return The clientToAmTokenMasterKey.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString getClientToAmTokenMasterKey() {
      return clientToAmTokenMasterKey_;
    }

    public static final int APPLICATION_ACLS_FIELD_NUMBER = 3;
    @SuppressWarnings("serial")
    private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto> applicationACLs_;
    /**
     * <code>repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 3;</code>
     */
    @java.lang.Override
    public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto> getApplicationACLsList() {
      return applicationACLs_;
    }
    /**
     * <code>repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 3;</code>
     */
    @java.lang.Override
    public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProtoOrBuilder> 
        getApplicationACLsOrBuilderList() {
      return applicationACLs_;
    }
    /**
     * <code>repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 3;</code>
     */
    @java.lang.Override
    public int getApplicationACLsCount() {
      return applicationACLs_.size();
    }
    /**
     * <code>repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 3;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto getApplicationACLs(int index) {
      return applicationACLs_.get(index);
    }
    /**
     * <code>repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 3;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProtoOrBuilder getApplicationACLsOrBuilder(
        int index) {
      return applicationACLs_.get(index);
    }

    public static final int CONTAINERS_FROM_PREVIOUS_ATTEMPTS_FIELD_NUMBER = 4;
    @SuppressWarnings("serial")
    private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto> containersFromPreviousAttempts_;
    /**
     * <code>repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 4;</code>
     */
    @java.lang.Override
    public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto> getContainersFromPreviousAttemptsList() {
      return containersFromPreviousAttempts_;
    }
    /**
     * <code>repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 4;</code>
     */
    @java.lang.Override
    public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ContainerProtoOrBuilder> 
        getContainersFromPreviousAttemptsOrBuilderList() {
      return containersFromPreviousAttempts_;
    }
    /**
     * <code>repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 4;</code>
     */
    @java.lang.Override
    public int getContainersFromPreviousAttemptsCount() {
      return containersFromPreviousAttempts_.size();
    }
    /**
     * <code>repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 4;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto getContainersFromPreviousAttempts(int index) {
      return containersFromPreviousAttempts_.get(index);
    }
    /**
     * <code>repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 4;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ContainerProtoOrBuilder getContainersFromPreviousAttemptsOrBuilder(
        int index) {
      return containersFromPreviousAttempts_.get(index);
    }

    public static final int QUEUE_FIELD_NUMBER = 5;
    @SuppressWarnings("serial")
    private volatile java.lang.Object queue_ = "";
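    // queue_ holds either a java.lang.String or a ByteString; the two
    // accessors below convert lazily and cache the decoded form, which is
    // why the field is typed as Object and marked volatile.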
    /**
     * <code>optional string queue = 5;</code>
     * @return Whether the queue field is set.
     */
    @java.lang.Override
    public boolean hasQueue() {
      return ((bitField0_ & 0x00000004) != 0);
    }
    /**
     * <code>optional string queue = 5;</code>
     * @return The queue.
     */
    @java.lang.Override
    public java.lang.String getQueue() {
      java.lang.Object ref = queue_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          queue_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string queue = 5;</code>
     * @return The bytes for queue.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getQueueBytes() {
      java.lang.Object ref = queue_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        queue_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int NM_TOKENS_FROM_PREVIOUS_ATTEMPTS_FIELD_NUMBER = 6;
    @SuppressWarnings("serial")
    private java.util.List<org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto> nmTokensFromPreviousAttempts_;
    /**
     * <code>repeated .hadoop.yarn.NMTokenProto nm_tokens_from_previous_attempts = 6;</code>
     */
    @java.lang.Override
    public java.util.List<org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto> getNmTokensFromPreviousAttemptsList() {
      return nmTokensFromPreviousAttempts_;
    }
    /**
     * <code>repeated .hadoop.yarn.NMTokenProto nm_tokens_from_previous_attempts = 6;</code>
     */
    @java.lang.Override
    public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProtoOrBuilder> 
        getNmTokensFromPreviousAttemptsOrBuilderList() {
      return nmTokensFromPreviousAttempts_;
    }
    /**
     * <code>repeated .hadoop.yarn.NMTokenProto nm_tokens_from_previous_attempts = 6;</code>
     */
    @java.lang.Override
    public int getNmTokensFromPreviousAttemptsCount() {
      return nmTokensFromPreviousAttempts_.size();
    }
    /**
     * <code>repeated .hadoop.yarn.NMTokenProto nm_tokens_from_previous_attempts = 6;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto getNmTokensFromPreviousAttempts(int index) {
      return nmTokensFromPreviousAttempts_.get(index);
    }
    /**
     * <code>repeated .hadoop.yarn.NMTokenProto nm_tokens_from_previous_attempts = 6;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProtoOrBuilder getNmTokensFromPreviousAttemptsOrBuilder(
        int index) {
      return nmTokensFromPreviousAttempts_.get(index);
    }

    public static final int SCHEDULER_RESOURCE_TYPES_FIELD_NUMBER = 7;
    @SuppressWarnings("serial")
    private java.util.List<java.lang.Integer> schedulerResourceTypes_;
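    // Enum values are stored as their raw wire integers; the converter
    // below resolves each number on access and falls back to
    // SchedulerResourceTypes.MEMORY for values this stub does not recognize.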
    private static final org.apache.hadoop.thirdparty.protobuf.Internal.ListAdapter.Converter<
        java.lang.Integer, org.apache.hadoop.yarn.proto.YarnServiceProtos.SchedulerResourceTypes> schedulerResourceTypes_converter_ =
            new org.apache.hadoop.thirdparty.protobuf.Internal.ListAdapter.Converter<
                java.lang.Integer, org.apache.hadoop.yarn.proto.YarnServiceProtos.SchedulerResourceTypes>() {
              public org.apache.hadoop.yarn.proto.YarnServiceProtos.SchedulerResourceTypes convert(java.lang.Integer from) {
                org.apache.hadoop.yarn.proto.YarnServiceProtos.SchedulerResourceTypes result = org.apache.hadoop.yarn.proto.YarnServiceProtos.SchedulerResourceTypes.forNumber(from);
                return result == null ? org.apache.hadoop.yarn.proto.YarnServiceProtos.SchedulerResourceTypes.MEMORY : result;
              }
            };
    /**
     * <code>repeated .hadoop.yarn.SchedulerResourceTypes scheduler_resource_types = 7;</code>
     * @return A list containing the schedulerResourceTypes.
     */
    @java.lang.Override
    public java.util.List<org.apache.hadoop.yarn.proto.YarnServiceProtos.SchedulerResourceTypes> getSchedulerResourceTypesList() {
      return new org.apache.hadoop.thirdparty.protobuf.Internal.ListAdapter<
          java.lang.Integer, org.apache.hadoop.yarn.proto.YarnServiceProtos.SchedulerResourceTypes>(schedulerResourceTypes_, schedulerResourceTypes_converter_);
    }
    /**
     * <code>repeated .hadoop.yarn.SchedulerResourceTypes scheduler_resource_types = 7;</code>
     * @return The count of schedulerResourceTypes.
     */
    @java.lang.Override
    public int getSchedulerResourceTypesCount() {
      return schedulerResourceTypes_.size();
    }
    /**
     * <code>repeated .hadoop.yarn.SchedulerResourceTypes scheduler_resource_types = 7;</code>
     * @param index The index of the element to return.
     * @return The schedulerResourceTypes at the given index.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.SchedulerResourceTypes getSchedulerResourceTypes(int index) {
      return schedulerResourceTypes_converter_.convert(schedulerResourceTypes_.get(index));
    }

    public static final int RESOURCE_PROFILES_FIELD_NUMBER = 8;
    private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto resourceProfiles_;
    /**
     * <code>optional .hadoop.yarn.ResourceProfilesProto resource_profiles = 8;</code>
     * @return Whether the resourceProfiles field is set.
     */
    @java.lang.Override
    public boolean hasResourceProfiles() {
      return ((bitField0_ & 0x00000008) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.ResourceProfilesProto resource_profiles = 8;</code>
     * @return The resourceProfiles.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto getResourceProfiles() {
      return resourceProfiles_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto.getDefaultInstance() : resourceProfiles_;
    }
    /**
     * <code>optional .hadoop.yarn.ResourceProfilesProto resource_profiles = 8;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProtoOrBuilder getResourceProfilesOrBuilder() {
      return resourceProfiles_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto.getDefaultInstance() : resourceProfiles_;
    }

    public static final int RESOURCE_TYPES_FIELD_NUMBER = 9;
    @SuppressWarnings("serial")
    private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto> resourceTypes_;
    /**
     * <code>repeated .hadoop.yarn.ResourceTypeInfoProto resource_types = 9;</code>
     */
    @java.lang.Override
    public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto> getResourceTypesList() {
      return resourceTypes_;
    }
    /**
     * <code>repeated .hadoop.yarn.ResourceTypeInfoProto resource_types = 9;</code>
     */
    @java.lang.Override
    public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProtoOrBuilder> 
        getResourceTypesOrBuilderList() {
      return resourceTypes_;
    }
    /**
     * <code>repeated .hadoop.yarn.ResourceTypeInfoProto resource_types = 9;</code>
     */
    @java.lang.Override
    public int getResourceTypesCount() {
      return resourceTypes_.size();
    }
    /**
     * <code>repeated .hadoop.yarn.ResourceTypeInfoProto resource_types = 9;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto getResourceTypes(int index) {
      return resourceTypes_.get(index);
    }
    /**
     * <code>repeated .hadoop.yarn.ResourceTypeInfoProto resource_types = 9;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProtoOrBuilder getResourceTypesOrBuilder(
        int index) {
      return resourceTypes_.get(index);
    }

    private byte memoizedIsInitialized = -1;
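    // memoizedIsInitialized: -1 = not yet computed, 0 = known to be
    // uninitialized, 1 = known to be initialized (see isInitialized()).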
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      if (hasMaximumCapability()) {
        if (!getMaximumCapability().isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      for (int i = 0; i < getContainersFromPreviousAttemptsCount(); i++) {
        if (!getContainersFromPreviousAttempts(i).isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      for (int i = 0; i < getNmTokensFromPreviousAttemptsCount(); i++) {
        if (!getNmTokensFromPreviousAttempts(i).isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      if (hasResourceProfiles()) {
        if (!getResourceProfiles().isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      for (int i = 0; i < getResourceTypesCount(); i++) {
        if (!getResourceTypes(i).isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeMessage(1, getMaximumCapability());
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        output.writeBytes(2, clientToAmTokenMasterKey_);
      }
      for (int i = 0; i < applicationACLs_.size(); i++) {
        output.writeMessage(3, applicationACLs_.get(i));
      }
      for (int i = 0; i < containersFromPreviousAttempts_.size(); i++) {
        output.writeMessage(4, containersFromPreviousAttempts_.get(i));
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 5, queue_);
      }
      for (int i = 0; i < nmTokensFromPreviousAttempts_.size(); i++) {
        output.writeMessage(6, nmTokensFromPreviousAttempts_.get(i));
      }
      for (int i = 0; i < schedulerResourceTypes_.size(); i++) {
        output.writeEnum(7, schedulerResourceTypes_.get(i));
      }
      if (((bitField0_ & 0x00000008) != 0)) {
        output.writeMessage(8, getResourceProfiles());
      }
      for (int i = 0; i < resourceTypes_.size(); i++) {
        output.writeMessage(9, resourceTypes_.get(i));
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, getMaximumCapability());
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeBytesSize(2, clientToAmTokenMasterKey_);
      }
      for (int i = 0; i < applicationACLs_.size(); i++) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(3, applicationACLs_.get(i));
      }
      for (int i = 0; i < containersFromPreviousAttempts_.size(); i++) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(4, containersFromPreviousAttempts_.get(i));
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(5, queue_);
      }
      for (int i = 0; i < nmTokensFromPreviousAttempts_.size(); i++) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(6, nmTokensFromPreviousAttempts_.get(i));
      }
      {
        int dataSize = 0;
        for (int i = 0; i < schedulerResourceTypes_.size(); i++) {
          dataSize += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
            .computeEnumSizeNoTag(schedulerResourceTypes_.get(i));
        }
        size += dataSize;
        size += 1 * schedulerResourceTypes_.size();
      }
      if (((bitField0_ & 0x00000008) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(8, getResourceProfiles());
      }
      for (int i = 0; i < resourceTypes_.size(); i++) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(9, resourceTypes_.get(i));
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto) obj;

      if (hasMaximumCapability() != other.hasMaximumCapability()) return false;
      if (hasMaximumCapability()) {
        if (!getMaximumCapability()
            .equals(other.getMaximumCapability())) return false;
      }
      if (hasClientToAmTokenMasterKey() != other.hasClientToAmTokenMasterKey()) return false;
      if (hasClientToAmTokenMasterKey()) {
        if (!getClientToAmTokenMasterKey()
            .equals(other.getClientToAmTokenMasterKey())) return false;
      }
      if (!getApplicationACLsList()
          .equals(other.getApplicationACLsList())) return false;
      if (!getContainersFromPreviousAttemptsList()
          .equals(other.getContainersFromPreviousAttemptsList())) return false;
      if (hasQueue() != other.hasQueue()) return false;
      if (hasQueue()) {
        if (!getQueue()
            .equals(other.getQueue())) return false;
      }
      if (!getNmTokensFromPreviousAttemptsList()
          .equals(other.getNmTokensFromPreviousAttemptsList())) return false;
      if (!schedulerResourceTypes_.equals(other.schedulerResourceTypes_)) return false;
      if (hasResourceProfiles() != other.hasResourceProfiles()) return false;
      if (hasResourceProfiles()) {
        if (!getResourceProfiles()
            .equals(other.getResourceProfiles())) return false;
      }
      if (!getResourceTypesList()
          .equals(other.getResourceTypesList())) return false;
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasMaximumCapability()) {
        hash = (37 * hash) + MAXIMUMCAPABILITY_FIELD_NUMBER;
        hash = (53 * hash) + getMaximumCapability().hashCode();
      }
      if (hasClientToAmTokenMasterKey()) {
        hash = (37 * hash) + CLIENT_TO_AM_TOKEN_MASTER_KEY_FIELD_NUMBER;
        hash = (53 * hash) + getClientToAmTokenMasterKey().hashCode();
      }
      if (getApplicationACLsCount() > 0) {
        hash = (37 * hash) + APPLICATION_ACLS_FIELD_NUMBER;
        hash = (53 * hash) + getApplicationACLsList().hashCode();
      }
      if (getContainersFromPreviousAttemptsCount() > 0) {
        hash = (37 * hash) + CONTAINERS_FROM_PREVIOUS_ATTEMPTS_FIELD_NUMBER;
        hash = (53 * hash) + getContainersFromPreviousAttemptsList().hashCode();
      }
      if (hasQueue()) {
        hash = (37 * hash) + QUEUE_FIELD_NUMBER;
        hash = (53 * hash) + getQueue().hashCode();
      }
      if (getNmTokensFromPreviousAttemptsCount() > 0) {
        hash = (37 * hash) + NM_TOKENS_FROM_PREVIOUS_ATTEMPTS_FIELD_NUMBER;
        hash = (53 * hash) + getNmTokensFromPreviousAttemptsList().hashCode();
      }
      if (getSchedulerResourceTypesCount() > 0) {
        hash = (37 * hash) + SCHEDULER_RESOURCE_TYPES_FIELD_NUMBER;
        hash = (53 * hash) + schedulerResourceTypes_.hashCode();
      }
      if (hasResourceProfiles()) {
        hash = (37 * hash) + RESOURCE_PROFILES_FIELD_NUMBER;
        hash = (53 * hash) + getResourceProfiles().hashCode();
      }
      if (getResourceTypesCount() > 0) {
        hash = (37 * hash) + RESOURCE_TYPES_FIELD_NUMBER;
        hash = (53 * hash) + getResourceTypesList().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }
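
    // Sketch (illustrative, not generated): a byte[] round trip through the
    // static parsers above; toByteArray() is inherited from the message base
    // class.
    //   byte[] bytes = response.toByteArray();
    //   RegisterApplicationMasterResponseProto copy =
    //       RegisterApplicationMasterResponseProto.parseFrom(bytes);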

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }
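
    // Sketch (illustrative): messages are immutable, so toBuilder() is the
    // supported way to derive a modified copy; setQueue() is the usual
    // generated setter for the optional queue field (assumed here).
    //   RegisterApplicationMasterResponseProto updated =
    //       response.toBuilder().setQueue("default").build();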

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.RegisterApplicationMasterResponseProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.RegisterApplicationMasterResponseProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_RegisterApplicationMasterResponseProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_RegisterApplicationMasterResponseProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
          getMaximumCapabilityFieldBuilder();
          getApplicationACLsFieldBuilder();
          getContainersFromPreviousAttemptsFieldBuilder();
          getNmTokensFromPreviousAttemptsFieldBuilder();
          getResourceProfilesFieldBuilder();
          getResourceTypesFieldBuilder();
        }
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        maximumCapability_ = null;
        if (maximumCapabilityBuilder_ != null) {
          maximumCapabilityBuilder_.dispose();
          maximumCapabilityBuilder_ = null;
        }
        clientToAmTokenMasterKey_ = org.apache.hadoop.thirdparty.protobuf.ByteString.EMPTY;
        if (applicationACLsBuilder_ == null) {
          applicationACLs_ = java.util.Collections.emptyList();
        } else {
          applicationACLs_ = null;
          applicationACLsBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000004);
        if (containersFromPreviousAttemptsBuilder_ == null) {
          containersFromPreviousAttempts_ = java.util.Collections.emptyList();
        } else {
          containersFromPreviousAttempts_ = null;
          containersFromPreviousAttemptsBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000008);
        queue_ = "";
        if (nmTokensFromPreviousAttemptsBuilder_ == null) {
          nmTokensFromPreviousAttempts_ = java.util.Collections.emptyList();
        } else {
          nmTokensFromPreviousAttempts_ = null;
          nmTokensFromPreviousAttemptsBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000020);
        schedulerResourceTypes_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000040);
        resourceProfiles_ = null;
        if (resourceProfilesBuilder_ != null) {
          resourceProfilesBuilder_.dispose();
          resourceProfilesBuilder_ = null;
        }
        if (resourceTypesBuilder_ == null) {
          resourceTypes_ = java.util.Collections.emptyList();
        } else {
          resourceTypes_ = null;
          resourceTypesBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000100);
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_RegisterApplicationMasterResponseProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto(this);
        buildPartialRepeatedFields(result);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartialRepeatedFields(org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto result) {
        if (applicationACLsBuilder_ == null) {
          if (((bitField0_ & 0x00000004) != 0)) {
            applicationACLs_ = java.util.Collections.unmodifiableList(applicationACLs_);
            bitField0_ = (bitField0_ & ~0x00000004);
          }
          result.applicationACLs_ = applicationACLs_;
        } else {
          result.applicationACLs_ = applicationACLsBuilder_.build();
        }
        if (containersFromPreviousAttemptsBuilder_ == null) {
          if (((bitField0_ & 0x00000008) != 0)) {
            containersFromPreviousAttempts_ = java.util.Collections.unmodifiableList(containersFromPreviousAttempts_);
            bitField0_ = (bitField0_ & ~0x00000008);
          }
          result.containersFromPreviousAttempts_ = containersFromPreviousAttempts_;
        } else {
          result.containersFromPreviousAttempts_ = containersFromPreviousAttemptsBuilder_.build();
        }
        if (nmTokensFromPreviousAttemptsBuilder_ == null) {
          if (((bitField0_ & 0x00000020) != 0)) {
            nmTokensFromPreviousAttempts_ = java.util.Collections.unmodifiableList(nmTokensFromPreviousAttempts_);
            bitField0_ = (bitField0_ & ~0x00000020);
          }
          result.nmTokensFromPreviousAttempts_ = nmTokensFromPreviousAttempts_;
        } else {
          result.nmTokensFromPreviousAttempts_ = nmTokensFromPreviousAttemptsBuilder_.build();
        }
        if (((bitField0_ & 0x00000040) != 0)) {
          schedulerResourceTypes_ = java.util.Collections.unmodifiableList(schedulerResourceTypes_);
          bitField0_ = (bitField0_ & ~0x00000040);
        }
        result.schedulerResourceTypes_ = schedulerResourceTypes_;
        if (resourceTypesBuilder_ == null) {
          if (((bitField0_ & 0x00000100) != 0)) {
            resourceTypes_ = java.util.Collections.unmodifiableList(resourceTypes_);
            bitField0_ = (bitField0_ & ~0x00000100);
          }
          result.resourceTypes_ = resourceTypes_;
        } else {
          result.resourceTypes_ = resourceTypesBuilder_.build();
        }
      }
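
      // buildPartial0 copies the scalar and singular message fields and
      // remaps builder bit positions onto the message's denser bitField0_
      // layout (e.g. the builder tracks queue at 0x00000010 while the built
      // message records it at 0x00000004).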

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.maximumCapability_ = maximumCapabilityBuilder_ == null
              ? maximumCapability_
              : maximumCapabilityBuilder_.build();
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.clientToAmTokenMasterKey_ = clientToAmTokenMasterKey_;
          to_bitField0_ |= 0x00000002;
        }
        if (((from_bitField0_ & 0x00000010) != 0)) {
          result.queue_ = queue_;
          to_bitField0_ |= 0x00000004;
        }
        if (((from_bitField0_ & 0x00000080) != 0)) {
          result.resourceProfiles_ = resourceProfilesBuilder_ == null
              ? resourceProfiles_
              : resourceProfilesBuilder_.build();
          to_bitField0_ |= 0x00000008;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto.getDefaultInstance()) return this;
        if (other.hasMaximumCapability()) {
          mergeMaximumCapability(other.getMaximumCapability());
        }
        if (other.hasClientToAmTokenMasterKey()) {
          setClientToAmTokenMasterKey(other.getClientToAmTokenMasterKey());
        }
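        // For each repeated field below, merging either adopts the other
        // message's (immutable) list directly when this builder's copy is
        // empty, or appends element-wise; the builder-backed branch makes
        // the same choice through the RepeatedFieldBuilderV3.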
        if (applicationACLsBuilder_ == null) {
          if (!other.applicationACLs_.isEmpty()) {
            if (applicationACLs_.isEmpty()) {
              applicationACLs_ = other.applicationACLs_;
              bitField0_ = (bitField0_ & ~0x00000004);
            } else {
              ensureApplicationACLsIsMutable();
              applicationACLs_.addAll(other.applicationACLs_);
            }
            onChanged();
          }
        } else {
          if (!other.applicationACLs_.isEmpty()) {
            if (applicationACLsBuilder_.isEmpty()) {
              applicationACLsBuilder_.dispose();
              applicationACLsBuilder_ = null;
              applicationACLs_ = other.applicationACLs_;
              bitField0_ = (bitField0_ & ~0x00000004);
              applicationACLsBuilder_ = 
                org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                   getApplicationACLsFieldBuilder() : null;
            } else {
              applicationACLsBuilder_.addAllMessages(other.applicationACLs_);
            }
          }
        }
        if (containersFromPreviousAttemptsBuilder_ == null) {
          if (!other.containersFromPreviousAttempts_.isEmpty()) {
            if (containersFromPreviousAttempts_.isEmpty()) {
              containersFromPreviousAttempts_ = other.containersFromPreviousAttempts_;
              bitField0_ = (bitField0_ & ~0x00000008);
            } else {
              ensureContainersFromPreviousAttemptsIsMutable();
              containersFromPreviousAttempts_.addAll(other.containersFromPreviousAttempts_);
            }
            onChanged();
          }
        } else {
          if (!other.containersFromPreviousAttempts_.isEmpty()) {
            if (containersFromPreviousAttemptsBuilder_.isEmpty()) {
              containersFromPreviousAttemptsBuilder_.dispose();
              containersFromPreviousAttemptsBuilder_ = null;
              containersFromPreviousAttempts_ = other.containersFromPreviousAttempts_;
              bitField0_ = (bitField0_ & ~0x00000008);
              containersFromPreviousAttemptsBuilder_ = 
                org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                   getContainersFromPreviousAttemptsFieldBuilder() : null;
            } else {
              containersFromPreviousAttemptsBuilder_.addAllMessages(other.containersFromPreviousAttempts_);
            }
          }
        }
        if (other.hasQueue()) {
          queue_ = other.queue_;
          bitField0_ |= 0x00000010;
          onChanged();
        }
        if (nmTokensFromPreviousAttemptsBuilder_ == null) {
          if (!other.nmTokensFromPreviousAttempts_.isEmpty()) {
            if (nmTokensFromPreviousAttempts_.isEmpty()) {
              nmTokensFromPreviousAttempts_ = other.nmTokensFromPreviousAttempts_;
              bitField0_ = (bitField0_ & ~0x00000020);
            } else {
              ensureNmTokensFromPreviousAttemptsIsMutable();
              nmTokensFromPreviousAttempts_.addAll(other.nmTokensFromPreviousAttempts_);
            }
            onChanged();
          }
        } else {
          if (!other.nmTokensFromPreviousAttempts_.isEmpty()) {
            if (nmTokensFromPreviousAttemptsBuilder_.isEmpty()) {
              nmTokensFromPreviousAttemptsBuilder_.dispose();
              nmTokensFromPreviousAttemptsBuilder_ = null;
              nmTokensFromPreviousAttempts_ = other.nmTokensFromPreviousAttempts_;
              bitField0_ = (bitField0_ & ~0x00000020);
              nmTokensFromPreviousAttemptsBuilder_ = 
                org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                   getNmTokensFromPreviousAttemptsFieldBuilder() : null;
            } else {
              nmTokensFromPreviousAttemptsBuilder_.addAllMessages(other.nmTokensFromPreviousAttempts_);
            }
          }
        }
        if (!other.schedulerResourceTypes_.isEmpty()) {
          if (schedulerResourceTypes_.isEmpty()) {
            schedulerResourceTypes_ = other.schedulerResourceTypes_;
            bitField0_ = (bitField0_ & ~0x00000040);
          } else {
            ensureSchedulerResourceTypesIsMutable();
            schedulerResourceTypes_.addAll(other.schedulerResourceTypes_);
          }
          onChanged();
        }
        if (other.hasResourceProfiles()) {
          mergeResourceProfiles(other.getResourceProfiles());
        }
        if (resourceTypesBuilder_ == null) {
          if (!other.resourceTypes_.isEmpty()) {
            if (resourceTypes_.isEmpty()) {
              resourceTypes_ = other.resourceTypes_;
              bitField0_ = (bitField0_ & ~0x00000100);
            } else {
              ensureResourceTypesIsMutable();
              resourceTypes_.addAll(other.resourceTypes_);
            }
            onChanged();
          }
        } else {
          if (!other.resourceTypes_.isEmpty()) {
            if (resourceTypesBuilder_.isEmpty()) {
              resourceTypesBuilder_.dispose();
              resourceTypesBuilder_ = null;
              resourceTypes_ = other.resourceTypes_;
              bitField0_ = (bitField0_ & ~0x00000100);
              resourceTypesBuilder_ = 
                org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                   getResourceTypesFieldBuilder() : null;
            } else {
              resourceTypesBuilder_.addAllMessages(other.resourceTypes_);
            }
          }
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }
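
      // isInitialized only needs to recurse into message-typed fields: this
      // message declares no required fields itself, so the checks below cover
      // transitively required fields of the nested types. applicationACLs is
      // skipped, presumably because ApplicationACLMapProto is always
      // initialized.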

      @java.lang.Override
      public final boolean isInitialized() {
        if (hasMaximumCapability()) {
          if (!getMaximumCapability().isInitialized()) {
            return false;
          }
        }
        for (int i = 0; i < getContainersFromPreviousAttemptsCount(); i++) {
          if (!getContainersFromPreviousAttempts(i).isInitialized()) {
            return false;
          }
        }
        for (int i = 0; i < getNmTokensFromPreviousAttemptsCount(); i++) {
          if (!getNmTokensFromPreviousAttempts(i).isInitialized()) {
            return false;
          }
        }
        if (hasResourceProfiles()) {
          if (!getResourceProfiles().isInitialized()) {
            return false;
          }
        }
        for (int i = 0; i < getResourceTypesCount(); i++) {
          if (!getResourceTypes(i).isInitialized()) {
            return false;
          }
        }
        return true;
      }
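
      // Wire-format parse loop: each case label is a full tag,
      // (field_number << 3) | wire_type. Case 10 is field 1
      // (maximumCapability, length-delimited), 18 is field 2, 26 field 3,
      // 34 field 4, 42 field 5, 50 field 6; case 56 is field 7 as a single
      // varint enum and case 58 the same field in packed form. Unrecognized
      // enum numbers are preserved via mergeUnknownVarintField, not dropped.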

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                input.readMessage(
                    getMaximumCapabilityFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
              case 18: {
                clientToAmTokenMasterKey_ = input.readBytes();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
              case 26: {
                org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto m =
                    input.readMessage(
                        org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto.PARSER,
                        extensionRegistry);
                if (applicationACLsBuilder_ == null) {
                  ensureApplicationACLsIsMutable();
                  applicationACLs_.add(m);
                } else {
                  applicationACLsBuilder_.addMessage(m);
                }
                break;
              } // case 26
              case 34: {
                org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto m =
                    input.readMessage(
                        org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.PARSER,
                        extensionRegistry);
                if (containersFromPreviousAttemptsBuilder_ == null) {
                  ensureContainersFromPreviousAttemptsIsMutable();
                  containersFromPreviousAttempts_.add(m);
                } else {
                  containersFromPreviousAttemptsBuilder_.addMessage(m);
                }
                break;
              } // case 34
              case 42: {
                queue_ = input.readBytes();
                bitField0_ |= 0x00000010;
                break;
              } // case 42
              case 50: {
                org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto m =
                    input.readMessage(
                        org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto.PARSER,
                        extensionRegistry);
                if (nmTokensFromPreviousAttemptsBuilder_ == null) {
                  ensureNmTokensFromPreviousAttemptsIsMutable();
                  nmTokensFromPreviousAttempts_.add(m);
                } else {
                  nmTokensFromPreviousAttemptsBuilder_.addMessage(m);
                }
                break;
              } // case 50
              case 56: {
                int tmpRaw = input.readEnum();
                org.apache.hadoop.yarn.proto.YarnServiceProtos.SchedulerResourceTypes tmpValue =
                    org.apache.hadoop.yarn.proto.YarnServiceProtos.SchedulerResourceTypes.forNumber(tmpRaw);
                if (tmpValue == null) {
                  mergeUnknownVarintField(7, tmpRaw);
                } else {
                  ensureSchedulerResourceTypesIsMutable();
                  schedulerResourceTypes_.add(tmpRaw);
                }
                break;
              } // case 56
              case 58: {
                int length = input.readRawVarint32();
                int oldLimit = input.pushLimit(length);
                while (input.getBytesUntilLimit() > 0) {
                  int tmpRaw = input.readEnum();
                  org.apache.hadoop.yarn.proto.YarnServiceProtos.SchedulerResourceTypes tmpValue =
                      org.apache.hadoop.yarn.proto.YarnServiceProtos.SchedulerResourceTypes.forNumber(tmpRaw);
                  if (tmpValue == null) {
                    mergeUnknownVarintField(7, tmpRaw);
                  } else {
                    ensureSchedulerResourceTypesIsMutable();
                    schedulerResourceTypes_.add(tmpRaw);
                  }
                }
                input.popLimit(oldLimit);
                break;
              } // case 58
              case 66: {
                input.readMessage(
                    getResourceProfilesFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000080;
                break;
              } // case 66
              case 74: {
                org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto m =
                    input.readMessage(
                        org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto.PARSER,
                        extensionRegistry);
                if (resourceTypesBuilder_ == null) {
                  ensureResourceTypesIsMutable();
                  resourceTypes_.add(m);
                } else {
                  resourceTypesBuilder_.addMessage(m);
                }
                break;
              } // case 74
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

      private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto maximumCapability_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> maximumCapabilityBuilder_;
      /**
       * <code>optional .hadoop.yarn.ResourceProto maximumCapability = 1;</code>
       * @return Whether the maximumCapability field is set.
       */
      public boolean hasMaximumCapability() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto maximumCapability = 1;</code>
       * @return The maximumCapability.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getMaximumCapability() {
        if (maximumCapabilityBuilder_ == null) {
          return maximumCapability_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : maximumCapability_;
        } else {
          return maximumCapabilityBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto maximumCapability = 1;</code>
       */
      public Builder setMaximumCapability(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) {
        if (maximumCapabilityBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          maximumCapability_ = value;
        } else {
          maximumCapabilityBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto maximumCapability = 1;</code>
       */
      public Builder setMaximumCapability(
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder builderForValue) {
        if (maximumCapabilityBuilder_ == null) {
          maximumCapability_ = builderForValue.build();
        } else {
          maximumCapabilityBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto maximumCapability = 1;</code>
       */
      public Builder mergeMaximumCapability(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) {
        if (maximumCapabilityBuilder_ == null) {
          if (((bitField0_ & 0x00000001) != 0) &&
            maximumCapability_ != null &&
            maximumCapability_ != org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance()) {
            getMaximumCapabilityBuilder().mergeFrom(value);
          } else {
            maximumCapability_ = value;
          }
        } else {
          maximumCapabilityBuilder_.mergeFrom(value);
        }
        if (maximumCapability_ != null) {
          bitField0_ |= 0x00000001;
          onChanged();
        }
        return this;
      }
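      // The merge above replaces the field only when nothing meaningful is
      // set yet; if a non-default value is already present it merges
      // field-by-field instead, matching proto2 semantics for singular
      // message fields.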
      /**
       * <code>optional .hadoop.yarn.ResourceProto maximumCapability = 1;</code>
       */
      public Builder clearMaximumCapability() {
        bitField0_ = (bitField0_ & ~0x00000001);
        maximumCapability_ = null;
        if (maximumCapabilityBuilder_ != null) {
          maximumCapabilityBuilder_.dispose();
          maximumCapabilityBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto maximumCapability = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder getMaximumCapabilityBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getMaximumCapabilityFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto maximumCapability = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getMaximumCapabilityOrBuilder() {
        if (maximumCapabilityBuilder_ != null) {
          return maximumCapabilityBuilder_.getMessageOrBuilder();
        } else {
          return maximumCapability_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : maximumCapability_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto maximumCapability = 1;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> 
          getMaximumCapabilityFieldBuilder() {
        if (maximumCapabilityBuilder_ == null) {
          maximumCapabilityBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder>(
                  getMaximumCapability(),
                  getParentForChildren(),
                  isClean());
          maximumCapability_ = null;
        }
        return maximumCapabilityBuilder_;
      }
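
      // Lazy single-field builder: maximumCapability_ holds the message until
      // a nested builder is first requested; once
      // getMaximumCapabilityFieldBuilder() runs, the SingleFieldBuilderV3
      // becomes the single source of truth and maximumCapability_ is nulled.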

      private org.apache.hadoop.thirdparty.protobuf.ByteString clientToAmTokenMasterKey_ = org.apache.hadoop.thirdparty.protobuf.ByteString.EMPTY;
      /**
       * <code>optional bytes client_to_am_token_master_key = 2;</code>
       * @return Whether the clientToAmTokenMasterKey field is set.
       */
      @java.lang.Override
      public boolean hasClientToAmTokenMasterKey() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * <code>optional bytes client_to_am_token_master_key = 2;</code>
       * @return The clientToAmTokenMasterKey.
       */
      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.ByteString getClientToAmTokenMasterKey() {
        return clientToAmTokenMasterKey_;
      }
      /**
       * <code>optional bytes client_to_am_token_master_key = 2;</code>
       * @param value The clientToAmTokenMasterKey to set.
       * @return This builder for chaining.
       */
      public Builder setClientToAmTokenMasterKey(org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        clientToAmTokenMasterKey_ = value;
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>optional bytes client_to_am_token_master_key = 2;</code>
       * @return This builder for chaining.
       */
      public Builder clearClientToAmTokenMasterKey() {
        bitField0_ = (bitField0_ & ~0x00000002);
        clientToAmTokenMasterKey_ = getDefaultInstance().getClientToAmTokenMasterKey();
        onChanged();
        return this;
      }
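
      // bytes fields are exposed as immutable ByteString values, so the
      // setter stores the reference directly and clear() simply restores the
      // default instance's empty value; no defensive copying is needed.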

      private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto> applicationACLs_ =
        java.util.Collections.emptyList();
      private void ensureApplicationACLsIsMutable() {
        if (!((bitField0_ & 0x00000004) != 0)) {
          applicationACLs_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto>(applicationACLs_);
          bitField0_ |= 0x00000004;
        }
      }

      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProtoOrBuilder> applicationACLsBuilder_;

      /**
       * <code>repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 3;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto> getApplicationACLsList() {
        if (applicationACLsBuilder_ == null) {
          return java.util.Collections.unmodifiableList(applicationACLs_);
        } else {
          return applicationACLsBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 3;</code>
       */
      public int getApplicationACLsCount() {
        if (applicationACLsBuilder_ == null) {
          return applicationACLs_.size();
        } else {
          return applicationACLsBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 3;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto getApplicationACLs(int index) {
        if (applicationACLsBuilder_ == null) {
          return applicationACLs_.get(index);
        } else {
          return applicationACLsBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 3;</code>
       */
      public Builder setApplicationACLs(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto value) {
        if (applicationACLsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureApplicationACLsIsMutable();
          applicationACLs_.set(index, value);
          onChanged();
        } else {
          applicationACLsBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 3;</code>
       */
      public Builder setApplicationACLs(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto.Builder builderForValue) {
        if (applicationACLsBuilder_ == null) {
          ensureApplicationACLsIsMutable();
          applicationACLs_.set(index, builderForValue.build());
          onChanged();
        } else {
          applicationACLsBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 3;</code>
       */
      public Builder addApplicationACLs(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto value) {
        if (applicationACLsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureApplicationACLsIsMutable();
          applicationACLs_.add(value);
          onChanged();
        } else {
          applicationACLsBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 3;</code>
       */
      public Builder addApplicationACLs(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto value) {
        if (applicationACLsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureApplicationACLsIsMutable();
          applicationACLs_.add(index, value);
          onChanged();
        } else {
          applicationACLsBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 3;</code>
       */
      public Builder addApplicationACLs(
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto.Builder builderForValue) {
        if (applicationACLsBuilder_ == null) {
          ensureApplicationACLsIsMutable();
          applicationACLs_.add(builderForValue.build());
          onChanged();
        } else {
          applicationACLsBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 3;</code>
       */
      public Builder addApplicationACLs(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto.Builder builderForValue) {
        if (applicationACLsBuilder_ == null) {
          ensureApplicationACLsIsMutable();
          applicationACLs_.add(index, builderForValue.build());
          onChanged();
        } else {
          applicationACLsBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 3;</code>
       */
      public Builder addAllApplicationACLs(
          java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto> values) {
        if (applicationACLsBuilder_ == null) {
          ensureApplicationACLsIsMutable();
          org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
              values, applicationACLs_);
          onChanged();
        } else {
          applicationACLsBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 3;</code>
       */
      public Builder clearApplicationACLs() {
        if (applicationACLsBuilder_ == null) {
          applicationACLs_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000004);
          onChanged();
        } else {
          applicationACLsBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 3;</code>
       */
      public Builder removeApplicationACLs(int index) {
        if (applicationACLsBuilder_ == null) {
          ensureApplicationACLsIsMutable();
          applicationACLs_.remove(index);
          onChanged();
        } else {
          applicationACLsBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 3;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto.Builder getApplicationACLsBuilder(
          int index) {
        return getApplicationACLsFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 3;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProtoOrBuilder getApplicationACLsOrBuilder(
          int index) {
        if (applicationACLsBuilder_ == null) {
          return applicationACLs_.get(index);
        } else {
          return applicationACLsBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 3;</code>
       */
      public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProtoOrBuilder> 
           getApplicationACLsOrBuilderList() {
        if (applicationACLsBuilder_ != null) {
          return applicationACLsBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(applicationACLs_);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 3;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto.Builder addApplicationACLsBuilder() {
        return getApplicationACLsFieldBuilder().addBuilder(
            org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 3;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto.Builder addApplicationACLsBuilder(
          int index) {
        return getApplicationACLsFieldBuilder().addBuilder(
            index, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 3;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto.Builder> 
           getApplicationACLsBuilderList() {
        return getApplicationACLsFieldBuilder().getBuilderList();
      }
      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProtoOrBuilder> 
          getApplicationACLsFieldBuilder() {
        if (applicationACLsBuilder_ == null) {
          applicationACLsBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProtoOrBuilder>(
                  applicationACLs_,
                  ((bitField0_ & 0x00000004) != 0),
                  getParentForChildren(),
                  isClean());
          applicationACLs_ = null;
        }
        return applicationACLsBuilder_;
      }
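
      // Repeated-field pattern (used here and by the other repeated fields
      // below): elements live in a copy-on-write list until nested builders
      // are requested, after which RepeatedFieldBuilderV3 owns the data. The
      // bit 0x00000004 tracks list mutability, not field presence.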

      private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto> containersFromPreviousAttempts_ =
        java.util.Collections.emptyList();
      private void ensureContainersFromPreviousAttemptsIsMutable() {
        if (!((bitField0_ & 0x00000008) != 0)) {
          containersFromPreviousAttempts_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto>(containersFromPreviousAttempts_);
          bitField0_ |= 0x00000008;
        }
      }

      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProtoOrBuilder> containersFromPreviousAttemptsBuilder_;

      /**
       * <code>repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 4;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto> getContainersFromPreviousAttemptsList() {
        if (containersFromPreviousAttemptsBuilder_ == null) {
          return java.util.Collections.unmodifiableList(containersFromPreviousAttempts_);
        } else {
          return containersFromPreviousAttemptsBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 4;</code>
       */
      public int getContainersFromPreviousAttemptsCount() {
        if (containersFromPreviousAttemptsBuilder_ == null) {
          return containersFromPreviousAttempts_.size();
        } else {
          return containersFromPreviousAttemptsBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 4;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto getContainersFromPreviousAttempts(int index) {
        if (containersFromPreviousAttemptsBuilder_ == null) {
          return containersFromPreviousAttempts_.get(index);
        } else {
          return containersFromPreviousAttemptsBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 4;</code>
       */
      public Builder setContainersFromPreviousAttempts(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto value) {
        if (containersFromPreviousAttemptsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureContainersFromPreviousAttemptsIsMutable();
          containersFromPreviousAttempts_.set(index, value);
          onChanged();
        } else {
          containersFromPreviousAttemptsBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 4;</code>
       */
      public Builder setContainersFromPreviousAttempts(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder builderForValue) {
        if (containersFromPreviousAttemptsBuilder_ == null) {
          ensureContainersFromPreviousAttemptsIsMutable();
          containersFromPreviousAttempts_.set(index, builderForValue.build());
          onChanged();
        } else {
          containersFromPreviousAttemptsBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 4;</code>
       */
      public Builder addContainersFromPreviousAttempts(org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto value) {
        if (containersFromPreviousAttemptsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureContainersFromPreviousAttemptsIsMutable();
          containersFromPreviousAttempts_.add(value);
          onChanged();
        } else {
          containersFromPreviousAttemptsBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 4;</code>
       */
      public Builder addContainersFromPreviousAttempts(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto value) {
        if (containersFromPreviousAttemptsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureContainersFromPreviousAttemptsIsMutable();
          containersFromPreviousAttempts_.add(index, value);
          onChanged();
        } else {
          containersFromPreviousAttemptsBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 4;</code>
       */
      public Builder addContainersFromPreviousAttempts(
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder builderForValue) {
        if (containersFromPreviousAttemptsBuilder_ == null) {
          ensureContainersFromPreviousAttemptsIsMutable();
          containersFromPreviousAttempts_.add(builderForValue.build());
          onChanged();
        } else {
          containersFromPreviousAttemptsBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 4;</code>
       */
      public Builder addContainersFromPreviousAttempts(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder builderForValue) {
        if (containersFromPreviousAttemptsBuilder_ == null) {
          ensureContainersFromPreviousAttemptsIsMutable();
          containersFromPreviousAttempts_.add(index, builderForValue.build());
          onChanged();
        } else {
          containersFromPreviousAttemptsBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 4;</code>
       */
      public Builder addAllContainersFromPreviousAttempts(
          java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto> values) {
        if (containersFromPreviousAttemptsBuilder_ == null) {
          ensureContainersFromPreviousAttemptsIsMutable();
          org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
              values, containersFromPreviousAttempts_);
          onChanged();
        } else {
          containersFromPreviousAttemptsBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 4;</code>
       */
      public Builder clearContainersFromPreviousAttempts() {
        if (containersFromPreviousAttemptsBuilder_ == null) {
          containersFromPreviousAttempts_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000008);
          onChanged();
        } else {
          containersFromPreviousAttemptsBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 4;</code>
       */
      public Builder removeContainersFromPreviousAttempts(int index) {
        if (containersFromPreviousAttemptsBuilder_ == null) {
          ensureContainersFromPreviousAttemptsIsMutable();
          containersFromPreviousAttempts_.remove(index);
          onChanged();
        } else {
          containersFromPreviousAttemptsBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 4;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder getContainersFromPreviousAttemptsBuilder(
          int index) {
        return getContainersFromPreviousAttemptsFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 4;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerProtoOrBuilder getContainersFromPreviousAttemptsOrBuilder(
          int index) {
        if (containersFromPreviousAttemptsBuilder_ == null) {
          return containersFromPreviousAttempts_.get(index);
        } else {
          return containersFromPreviousAttemptsBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 4;</code>
       */
      public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ContainerProtoOrBuilder> 
           getContainersFromPreviousAttemptsOrBuilderList() {
        if (containersFromPreviousAttemptsBuilder_ != null) {
          return containersFromPreviousAttemptsBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(containersFromPreviousAttempts_);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 4;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder addContainersFromPreviousAttemptsBuilder() {
        return getContainersFromPreviousAttemptsFieldBuilder().addBuilder(
            org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 4;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder addContainersFromPreviousAttemptsBuilder(
          int index) {
        return getContainersFromPreviousAttemptsFieldBuilder().addBuilder(
            index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 4;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder> 
           getContainersFromPreviousAttemptsBuilderList() {
        return getContainersFromPreviousAttemptsFieldBuilder().getBuilderList();
      }
      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProtoOrBuilder> 
          getContainersFromPreviousAttemptsFieldBuilder() {
        if (containersFromPreviousAttemptsBuilder_ == null) {
          containersFromPreviousAttemptsBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProtoOrBuilder>(
                  containersFromPreviousAttempts_,
                  ((bitField0_ & 0x00000008) != 0),
                  getParentForChildren(),
                  isClean());
          containersFromPreviousAttempts_ = null;
        }
        return containersFromPreviousAttemptsBuilder_;
      }

      private java.lang.Object queue_ = "";
      /**
       * <code>optional string queue = 5;</code>
       * @return Whether the queue field is set.
       */
      public boolean hasQueue() {
        return ((bitField0_ & 0x00000010) != 0);
      }
      /**
       * <code>optional string queue = 5;</code>
       * @return The queue.
       */
      public java.lang.String getQueue() {
        java.lang.Object ref = queue_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            queue_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string queue = 5;</code>
       * @return The bytes for queue.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getQueueBytes() {
        java.lang.Object ref = queue_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          queue_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
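      // queue_ is declared as Object so it can hold either a String or the
      // ByteString read off the wire; the two getters above convert lazily
      // and cache the result (the String form is only cached when the bytes
      // are valid UTF-8).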
      /**
       * <code>optional string queue = 5;</code>
       * @param value The queue to set.
       * @return This builder for chaining.
       */
      public Builder setQueue(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        queue_ = value;
        bitField0_ |= 0x00000010;
        onChanged();
        return this;
      }
      /**
       * <code>optional string queue = 5;</code>
       * @return This builder for chaining.
       */
      public Builder clearQueue() {
        queue_ = getDefaultInstance().getQueue();
        bitField0_ = (bitField0_ & ~0x00000010);
        onChanged();
        return this;
      }
      /**
       * <code>optional string queue = 5;</code>
       * @param value The bytes for queue to set.
       * @return This builder for chaining.
       */
      public Builder setQueueBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        queue_ = value;
        bitField0_ |= 0x00000010;
        onChanged();
        return this;
      }

      private java.util.List<org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto> nmTokensFromPreviousAttempts_ =
        java.util.Collections.emptyList();
      private void ensureNmTokensFromPreviousAttemptsIsMutable() {
        if (!((bitField0_ & 0x00000020) != 0)) {
          nmTokensFromPreviousAttempts_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto>(nmTokensFromPreviousAttempts_);
          bitField0_ |= 0x00000020;
        }
      }

      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto, org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto.Builder, org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProtoOrBuilder> nmTokensFromPreviousAttemptsBuilder_;

      /**
       * <code>repeated .hadoop.yarn.NMTokenProto nm_tokens_from_previous_attempts = 6;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto> getNmTokensFromPreviousAttemptsList() {
        if (nmTokensFromPreviousAttemptsBuilder_ == null) {
          return java.util.Collections.unmodifiableList(nmTokensFromPreviousAttempts_);
        } else {
          return nmTokensFromPreviousAttemptsBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.NMTokenProto nm_tokens_from_previous_attempts = 6;</code>
       */
      public int getNmTokensFromPreviousAttemptsCount() {
        if (nmTokensFromPreviousAttemptsBuilder_ == null) {
          return nmTokensFromPreviousAttempts_.size();
        } else {
          return nmTokensFromPreviousAttemptsBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.NMTokenProto nm_tokens_from_previous_attempts = 6;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto getNmTokensFromPreviousAttempts(int index) {
        if (nmTokensFromPreviousAttemptsBuilder_ == null) {
          return nmTokensFromPreviousAttempts_.get(index);
        } else {
          return nmTokensFromPreviousAttemptsBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.NMTokenProto nm_tokens_from_previous_attempts = 6;</code>
       */
      public Builder setNmTokensFromPreviousAttempts(
          int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto value) {
        if (nmTokensFromPreviousAttemptsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureNmTokensFromPreviousAttemptsIsMutable();
          nmTokensFromPreviousAttempts_.set(index, value);
          onChanged();
        } else {
          nmTokensFromPreviousAttemptsBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NMTokenProto nm_tokens_from_previous_attempts = 6;</code>
       */
      public Builder setNmTokensFromPreviousAttempts(
          int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto.Builder builderForValue) {
        if (nmTokensFromPreviousAttemptsBuilder_ == null) {
          ensureNmTokensFromPreviousAttemptsIsMutable();
          nmTokensFromPreviousAttempts_.set(index, builderForValue.build());
          onChanged();
        } else {
          nmTokensFromPreviousAttemptsBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NMTokenProto nm_tokens_from_previous_attempts = 6;</code>
       */
      public Builder addNmTokensFromPreviousAttempts(org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto value) {
        if (nmTokensFromPreviousAttemptsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureNmTokensFromPreviousAttemptsIsMutable();
          nmTokensFromPreviousAttempts_.add(value);
          onChanged();
        } else {
          nmTokensFromPreviousAttemptsBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NMTokenProto nm_tokens_from_previous_attempts = 6;</code>
       */
      public Builder addNmTokensFromPreviousAttempts(
          int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto value) {
        if (nmTokensFromPreviousAttemptsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureNmTokensFromPreviousAttemptsIsMutable();
          nmTokensFromPreviousAttempts_.add(index, value);
          onChanged();
        } else {
          nmTokensFromPreviousAttemptsBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NMTokenProto nm_tokens_from_previous_attempts = 6;</code>
       */
      public Builder addNmTokensFromPreviousAttempts(
          org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto.Builder builderForValue) {
        if (nmTokensFromPreviousAttemptsBuilder_ == null) {
          ensureNmTokensFromPreviousAttemptsIsMutable();
          nmTokensFromPreviousAttempts_.add(builderForValue.build());
          onChanged();
        } else {
          nmTokensFromPreviousAttemptsBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NMTokenProto nm_tokens_from_previous_attempts = 6;</code>
       */
      public Builder addNmTokensFromPreviousAttempts(
          int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto.Builder builderForValue) {
        if (nmTokensFromPreviousAttemptsBuilder_ == null) {
          ensureNmTokensFromPreviousAttemptsIsMutable();
          nmTokensFromPreviousAttempts_.add(index, builderForValue.build());
          onChanged();
        } else {
          nmTokensFromPreviousAttemptsBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NMTokenProto nm_tokens_from_previous_attempts = 6;</code>
       */
      public Builder addAllNmTokensFromPreviousAttempts(
          java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto> values) {
        if (nmTokensFromPreviousAttemptsBuilder_ == null) {
          ensureNmTokensFromPreviousAttemptsIsMutable();
          org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
              values, nmTokensFromPreviousAttempts_);
          onChanged();
        } else {
          nmTokensFromPreviousAttemptsBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NMTokenProto nm_tokens_from_previous_attempts = 6;</code>
       */
      public Builder clearNmTokensFromPreviousAttempts() {
        if (nmTokensFromPreviousAttemptsBuilder_ == null) {
          nmTokensFromPreviousAttempts_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000020);
          onChanged();
        } else {
          nmTokensFromPreviousAttemptsBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NMTokenProto nm_tokens_from_previous_attempts = 6;</code>
       */
      public Builder removeNmTokensFromPreviousAttempts(int index) {
        if (nmTokensFromPreviousAttemptsBuilder_ == null) {
          ensureNmTokensFromPreviousAttemptsIsMutable();
          nmTokensFromPreviousAttempts_.remove(index);
          onChanged();
        } else {
          nmTokensFromPreviousAttemptsBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NMTokenProto nm_tokens_from_previous_attempts = 6;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto.Builder getNmTokensFromPreviousAttemptsBuilder(
          int index) {
        return getNmTokensFromPreviousAttemptsFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .hadoop.yarn.NMTokenProto nm_tokens_from_previous_attempts = 6;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProtoOrBuilder getNmTokensFromPreviousAttemptsOrBuilder(
          int index) {
        if (nmTokensFromPreviousAttemptsBuilder_ == null) {
          return nmTokensFromPreviousAttempts_.get(index);
        } else {
          return nmTokensFromPreviousAttemptsBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.NMTokenProto nm_tokens_from_previous_attempts = 6;</code>
       */
      public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProtoOrBuilder> 
           getNmTokensFromPreviousAttemptsOrBuilderList() {
        if (nmTokensFromPreviousAttemptsBuilder_ != null) {
          return nmTokensFromPreviousAttemptsBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(nmTokensFromPreviousAttempts_);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.NMTokenProto nm_tokens_from_previous_attempts = 6;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto.Builder addNmTokensFromPreviousAttemptsBuilder() {
        return getNmTokensFromPreviousAttemptsFieldBuilder().addBuilder(
            org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.NMTokenProto nm_tokens_from_previous_attempts = 6;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto.Builder addNmTokensFromPreviousAttemptsBuilder(
          int index) {
        return getNmTokensFromPreviousAttemptsFieldBuilder().addBuilder(
            index, org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.NMTokenProto nm_tokens_from_previous_attempts = 6;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto.Builder> 
           getNmTokensFromPreviousAttemptsBuilderList() {
        return getNmTokensFromPreviousAttemptsFieldBuilder().getBuilderList();
      }
      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto, org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto.Builder, org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProtoOrBuilder> 
          getNmTokensFromPreviousAttemptsFieldBuilder() {
        if (nmTokensFromPreviousAttemptsBuilder_ == null) {
          nmTokensFromPreviousAttemptsBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto, org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto.Builder, org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProtoOrBuilder>(
                  nmTokensFromPreviousAttempts_,
                  ((bitField0_ & 0x00000020) != 0),
                  getParentForChildren(),
                  isClean());
          nmTokensFromPreviousAttempts_ = null;
        }
        return nmTokensFromPreviousAttemptsBuilder_;
      }

      private java.util.List<java.lang.Integer> schedulerResourceTypes_ =
        java.util.Collections.emptyList();
      private void ensureSchedulerResourceTypesIsMutable() {
        if (!((bitField0_ & 0x00000040) != 0)) {
          schedulerResourceTypes_ = new java.util.ArrayList<java.lang.Integer>(schedulerResourceTypes_);
          bitField0_ |= 0x00000040;
        }
      }
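      // Repeated enum fields are stored as raw Integer numbers; the public
      // accessors below translate through schedulerResourceTypes_converter_,
      // while numbers the runtime does not recognize are routed to the
      // unknown-field set during parsing (see cases 56/58 in mergeFrom).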
      /**
       * <code>repeated .hadoop.yarn.SchedulerResourceTypes scheduler_resource_types = 7;</code>
       * @return A list containing the schedulerResourceTypes.
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnServiceProtos.SchedulerResourceTypes> getSchedulerResourceTypesList() {
        return new org.apache.hadoop.thirdparty.protobuf.Internal.ListAdapter<
            java.lang.Integer, org.apache.hadoop.yarn.proto.YarnServiceProtos.SchedulerResourceTypes>(schedulerResourceTypes_, schedulerResourceTypes_converter_);
      }
      /**
       * <code>repeated .hadoop.yarn.SchedulerResourceTypes scheduler_resource_types = 7;</code>
       * @return The count of schedulerResourceTypes.
       */
      public int getSchedulerResourceTypesCount() {
        return schedulerResourceTypes_.size();
      }
      /**
       * <code>repeated .hadoop.yarn.SchedulerResourceTypes scheduler_resource_types = 7;</code>
       * @param index The index of the element to return.
       * @return The schedulerResourceTypes at the given index.
       */
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.SchedulerResourceTypes getSchedulerResourceTypes(int index) {
        return schedulerResourceTypes_converter_.convert(schedulerResourceTypes_.get(index));
      }
      /**
       * <code>repeated .hadoop.yarn.SchedulerResourceTypes scheduler_resource_types = 7;</code>
       * @param index The index to set the value at.
       * @param value The schedulerResourceTypes to set.
       * @return This builder for chaining.
       */
      public Builder setSchedulerResourceTypes(
          int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.SchedulerResourceTypes value) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureSchedulerResourceTypesIsMutable();
        schedulerResourceTypes_.set(index, value.getNumber());
        onChanged();
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.SchedulerResourceTypes scheduler_resource_types = 7;</code>
       * @param value The schedulerResourceTypes to add.
       * @return This builder for chaining.
       */
      public Builder addSchedulerResourceTypes(org.apache.hadoop.yarn.proto.YarnServiceProtos.SchedulerResourceTypes value) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureSchedulerResourceTypesIsMutable();
        schedulerResourceTypes_.add(value.getNumber());
        onChanged();
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.SchedulerResourceTypes scheduler_resource_types = 7;</code>
       * @param values The schedulerResourceTypes to add.
       * @return This builder for chaining.
       */
      public Builder addAllSchedulerResourceTypes(
          java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnServiceProtos.SchedulerResourceTypes> values) {
        ensureSchedulerResourceTypesIsMutable();
        for (org.apache.hadoop.yarn.proto.YarnServiceProtos.SchedulerResourceTypes value : values) {
          schedulerResourceTypes_.add(value.getNumber());
        }
        onChanged();
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.SchedulerResourceTypes scheduler_resource_types = 7;</code>
       * @return This builder for chaining.
       */
      public Builder clearSchedulerResourceTypes() {
        schedulerResourceTypes_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000040);
        onChanged();
        return this;
      }
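      // Repeated enum fields are stored as their raw int wire values and
      // converted back to enum constants only on read, via the ListAdapter
      // returned by getSchedulerResourceTypesList(). An illustrative round
      // trip (a sketch; it assumes the MEMORY constant of the
      // SchedulerResourceTypes enum generated elsewhere in this file):
      //
      //   builder.addSchedulerResourceTypes(SchedulerResourceTypes.MEMORY);
      //   builder.getSchedulerResourceTypesList().get(0); // == MEMORY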

      private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto resourceProfiles_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProtoOrBuilder> resourceProfilesBuilder_;
      /**
       * <code>optional .hadoop.yarn.ResourceProfilesProto resource_profiles = 8;</code>
       * @return Whether the resourceProfiles field is set.
       */
      public boolean hasResourceProfiles() {
        return ((bitField0_ & 0x00000080) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProfilesProto resource_profiles = 8;</code>
       * @return The resourceProfiles.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto getResourceProfiles() {
        if (resourceProfilesBuilder_ == null) {
          return resourceProfiles_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto.getDefaultInstance() : resourceProfiles_;
        } else {
          return resourceProfilesBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProfilesProto resource_profiles = 8;</code>
       */
      public Builder setResourceProfiles(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto value) {
        if (resourceProfilesBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          resourceProfiles_ = value;
        } else {
          resourceProfilesBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000080;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProfilesProto resource_profiles = 8;</code>
       */
      public Builder setResourceProfiles(
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto.Builder builderForValue) {
        if (resourceProfilesBuilder_ == null) {
          resourceProfiles_ = builderForValue.build();
        } else {
          resourceProfilesBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000080;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProfilesProto resource_profiles = 8;</code>
       */
      public Builder mergeResourceProfiles(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto value) {
        if (resourceProfilesBuilder_ == null) {
          if (((bitField0_ & 0x00000080) != 0) &&
            resourceProfiles_ != null &&
            resourceProfiles_ != org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto.getDefaultInstance()) {
            getResourceProfilesBuilder().mergeFrom(value);
          } else {
            resourceProfiles_ = value;
          }
        } else {
          resourceProfilesBuilder_.mergeFrom(value);
        }
        if (resourceProfiles_ != null) {
          bitField0_ |= 0x00000080;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProfilesProto resource_profiles = 8;</code>
       */
      public Builder clearResourceProfiles() {
        bitField0_ = (bitField0_ & ~0x00000080);
        resourceProfiles_ = null;
        if (resourceProfilesBuilder_ != null) {
          resourceProfilesBuilder_.dispose();
          resourceProfilesBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProfilesProto resource_profiles = 8;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto.Builder getResourceProfilesBuilder() {
        bitField0_ |= 0x00000080;
        onChanged();
        return getResourceProfilesFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProfilesProto resource_profiles = 8;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProtoOrBuilder getResourceProfilesOrBuilder() {
        if (resourceProfilesBuilder_ != null) {
          return resourceProfilesBuilder_.getMessageOrBuilder();
        } else {
          return resourceProfiles_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto.getDefaultInstance() : resourceProfiles_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProfilesProto resource_profiles = 8;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProtoOrBuilder> 
          getResourceProfilesFieldBuilder() {
        if (resourceProfilesBuilder_ == null) {
          resourceProfilesBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProtoOrBuilder>(
                  getResourceProfiles(),
                  getParentForChildren(),
                  isClean());
          resourceProfiles_ = null;
        }
        return resourceProfilesBuilder_;
      }
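      // Same handoff as the repeated fields, but for a singular message: once
      // the SingleFieldBuilderV3 exists it owns the current message (seeded
      // from getResourceProfiles()), resourceProfiles_ is nulled out, and
      // set/merge/clear/get all delegate to the builder.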

      private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto> resourceTypes_ =
        java.util.Collections.emptyList();
      private void ensureResourceTypesIsMutable() {
        if (!((bitField0_ & 0x00000100) != 0)) {
          resourceTypes_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto>(resourceTypes_);
          bitField0_ |= 0x00000100;
        }
      }

      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProtoOrBuilder> resourceTypesBuilder_;

      /**
       * <code>repeated .hadoop.yarn.ResourceTypeInfoProto resource_types = 9;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto> getResourceTypesList() {
        if (resourceTypesBuilder_ == null) {
          return java.util.Collections.unmodifiableList(resourceTypes_);
        } else {
          return resourceTypesBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceTypeInfoProto resource_types = 9;</code>
       */
      public int getResourceTypesCount() {
        if (resourceTypesBuilder_ == null) {
          return resourceTypes_.size();
        } else {
          return resourceTypesBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceTypeInfoProto resource_types = 9;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto getResourceTypes(int index) {
        if (resourceTypesBuilder_ == null) {
          return resourceTypes_.get(index);
        } else {
          return resourceTypesBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceTypeInfoProto resource_types = 9;</code>
       */
      public Builder setResourceTypes(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto value) {
        if (resourceTypesBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureResourceTypesIsMutable();
          resourceTypes_.set(index, value);
          onChanged();
        } else {
          resourceTypesBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceTypeInfoProto resource_types = 9;</code>
       */
      public Builder setResourceTypes(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto.Builder builderForValue) {
        if (resourceTypesBuilder_ == null) {
          ensureResourceTypesIsMutable();
          resourceTypes_.set(index, builderForValue.build());
          onChanged();
        } else {
          resourceTypesBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceTypeInfoProto resource_types = 9;</code>
       */
      public Builder addResourceTypes(org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto value) {
        if (resourceTypesBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureResourceTypesIsMutable();
          resourceTypes_.add(value);
          onChanged();
        } else {
          resourceTypesBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceTypeInfoProto resource_types = 9;</code>
       */
      public Builder addResourceTypes(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto value) {
        if (resourceTypesBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureResourceTypesIsMutable();
          resourceTypes_.add(index, value);
          onChanged();
        } else {
          resourceTypesBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceTypeInfoProto resource_types = 9;</code>
       */
      public Builder addResourceTypes(
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto.Builder builderForValue) {
        if (resourceTypesBuilder_ == null) {
          ensureResourceTypesIsMutable();
          resourceTypes_.add(builderForValue.build());
          onChanged();
        } else {
          resourceTypesBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceTypeInfoProto resource_types = 9;</code>
       */
      public Builder addResourceTypes(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto.Builder builderForValue) {
        if (resourceTypesBuilder_ == null) {
          ensureResourceTypesIsMutable();
          resourceTypes_.add(index, builderForValue.build());
          onChanged();
        } else {
          resourceTypesBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceTypeInfoProto resource_types = 9;</code>
       */
      public Builder addAllResourceTypes(
          java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto> values) {
        if (resourceTypesBuilder_ == null) {
          ensureResourceTypesIsMutable();
          org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
              values, resourceTypes_);
          onChanged();
        } else {
          resourceTypesBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceTypeInfoProto resource_types = 9;</code>
       */
      public Builder clearResourceTypes() {
        if (resourceTypesBuilder_ == null) {
          resourceTypes_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000100);
          onChanged();
        } else {
          resourceTypesBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceTypeInfoProto resource_types = 9;</code>
       */
      public Builder removeResourceTypes(int index) {
        if (resourceTypesBuilder_ == null) {
          ensureResourceTypesIsMutable();
          resourceTypes_.remove(index);
          onChanged();
        } else {
          resourceTypesBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceTypeInfoProto resource_types = 9;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto.Builder getResourceTypesBuilder(
          int index) {
        return getResourceTypesFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceTypeInfoProto resource_types = 9;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProtoOrBuilder getResourceTypesOrBuilder(
          int index) {
        if (resourceTypesBuilder_ == null) {
          return resourceTypes_.get(index);
        } else {
          return resourceTypesBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceTypeInfoProto resource_types = 9;</code>
       */
      public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProtoOrBuilder> 
           getResourceTypesOrBuilderList() {
        if (resourceTypesBuilder_ != null) {
          return resourceTypesBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(resourceTypes_);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceTypeInfoProto resource_types = 9;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto.Builder addResourceTypesBuilder() {
        return getResourceTypesFieldBuilder().addBuilder(
            org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceTypeInfoProto resource_types = 9;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto.Builder addResourceTypesBuilder(
          int index) {
        return getResourceTypesFieldBuilder().addBuilder(
            index, org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceTypeInfoProto resource_types = 9;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto.Builder> 
           getResourceTypesBuilderList() {
        return getResourceTypesFieldBuilder().getBuilderList();
      }
      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProtoOrBuilder> 
          getResourceTypesFieldBuilder() {
        if (resourceTypesBuilder_ == null) {
          resourceTypesBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProtoOrBuilder>(
                  resourceTypes_,
                  ((bitField0_ & 0x00000100) != 0),
                  getParentForChildren(),
                  isClean());
          resourceTypes_ = null;
        }
        return resourceTypesBuilder_;
      }
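      // The nested builders returned by addResourceTypesBuilder() stay
      // attached until build(), so callers can mutate an element in place
      // after adding it. An illustrative sketch (setName is assumed from the
      // ResourceTypeInfoProto message in YarnProtos, not defined here):
      //
      //   responseBuilder.addResourceTypesBuilder()
      //       .setName("memory-mb");
      //   responseBuilder.getResourceTypes(0); // reflects the edit above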
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.RegisterApplicationMasterResponseProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.RegisterApplicationMasterResponseProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<RegisterApplicationMasterResponseProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<RegisterApplicationMasterResponseProto>() {
      @java.lang.Override
      public RegisterApplicationMasterResponseProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
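    // parsePartialFrom above deliberately returns buildPartial() and attaches
    // the partial message to any InvalidProtocolBufferException it throws, so
    // callers can inspect whatever fields were decoded before the failure;
    // plain IOExceptions are wrapped into InvalidProtocolBufferException the
    // same way.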

    public static org.apache.hadoop.thirdparty.protobuf.Parser<RegisterApplicationMasterResponseProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<RegisterApplicationMasterResponseProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }

  public interface FinishApplicationMasterRequestProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.FinishApplicationMasterRequestProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>optional string diagnostics = 1;</code>
     * @return Whether the diagnostics field is set.
     */
    boolean hasDiagnostics();
    /**
     * <code>optional string diagnostics = 1;</code>
     * @return The diagnostics.
     */
    java.lang.String getDiagnostics();
    /**
     * <code>optional string diagnostics = 1;</code>
     * @return The bytes for diagnostics.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getDiagnosticsBytes();

    /**
     * <code>optional string tracking_url = 2;</code>
     * @return Whether the trackingUrl field is set.
     */
    boolean hasTrackingUrl();
    /**
     * <code>optional string tracking_url = 2;</code>
     * @return The trackingUrl.
     */
    java.lang.String getTrackingUrl();
    /**
     * <code>optional string tracking_url = 2;</code>
     * @return The bytes for trackingUrl.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getTrackingUrlBytes();

    /**
     * <code>optional .hadoop.yarn.FinalApplicationStatusProto final_application_status = 3;</code>
     * @return Whether the finalApplicationStatus field is set.
     */
    boolean hasFinalApplicationStatus();
    /**
     * <code>optional .hadoop.yarn.FinalApplicationStatusProto final_application_status = 3;</code>
     * @return The finalApplicationStatus.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto getFinalApplicationStatus();
  }
  /**
   * Protobuf type {@code hadoop.yarn.FinishApplicationMasterRequestProto}
   */
  public static final class FinishApplicationMasterRequestProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.FinishApplicationMasterRequestProto)
      FinishApplicationMasterRequestProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use FinishApplicationMasterRequestProto.newBuilder() to construct.
    private FinishApplicationMasterRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private FinishApplicationMasterRequestProto() {
      diagnostics_ = "";
      trackingUrl_ = "";
      finalApplicationStatus_ = 0;
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new FinishApplicationMasterRequestProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_FinishApplicationMasterRequestProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_FinishApplicationMasterRequestProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterRequestProto.Builder.class);
    }

    private int bitField0_;
    public static final int DIAGNOSTICS_FIELD_NUMBER = 1;
    @SuppressWarnings("serial")
    private volatile java.lang.Object diagnostics_ = "";
    /**
     * <code>optional string diagnostics = 1;</code>
     * @return Whether the diagnostics field is set.
     */
    @java.lang.Override
    public boolean hasDiagnostics() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional string diagnostics = 1;</code>
     * @return The diagnostics.
     */
    @java.lang.Override
    public java.lang.String getDiagnostics() {
      java.lang.Object ref = diagnostics_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          diagnostics_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string diagnostics = 1;</code>
     * @return The bytes for diagnostics.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getDiagnosticsBytes() {
      java.lang.Object ref = diagnostics_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        diagnostics_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }
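    // diagnostics_ (and trackingUrl_ below) hold either a String or a
    // ByteString: the wire parser stores raw bytes, the first getDiagnostics()
    // call decodes them and caches the String if it was valid UTF-8, and
    // getDiagnosticsBytes() caches the reverse encoding. The field is volatile
    // so the cached form published by one thread is visible to others.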

    public static final int TRACKING_URL_FIELD_NUMBER = 2;
    @SuppressWarnings("serial")
    private volatile java.lang.Object trackingUrl_ = "";
    /**
     * <code>optional string tracking_url = 2;</code>
     * @return Whether the trackingUrl field is set.
     */
    @java.lang.Override
    public boolean hasTrackingUrl() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <code>optional string tracking_url = 2;</code>
     * @return The trackingUrl.
     */
    @java.lang.Override
    public java.lang.String getTrackingUrl() {
      java.lang.Object ref = trackingUrl_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          trackingUrl_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string tracking_url = 2;</code>
     * @return The bytes for trackingUrl.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getTrackingUrlBytes() {
      java.lang.Object ref = trackingUrl_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        trackingUrl_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int FINAL_APPLICATION_STATUS_FIELD_NUMBER = 3;
    private int finalApplicationStatus_ = 0;
    /**
     * <code>optional .hadoop.yarn.FinalApplicationStatusProto final_application_status = 3;</code>
     * @return Whether the finalApplicationStatus field is set.
     */
    @java.lang.Override public boolean hasFinalApplicationStatus() {
      return ((bitField0_ & 0x00000004) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.FinalApplicationStatusProto final_application_status = 3;</code>
     * @return The finalApplicationStatus.
     */
    @java.lang.Override public org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto getFinalApplicationStatus() {
      org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto result = org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto.forNumber(finalApplicationStatus_);
      return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto.APP_UNDEFINED : result;
    }
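    // Enum fields are stored as their int wire value; forNumber returns null
    // for numbers this client does not know, and the getter falls back to
    // APP_UNDEFINED rather than throwing, so messages from newer peers still
    // read safely.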

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, diagnostics_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 2, trackingUrl_);
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        output.writeEnum(3, finalApplicationStatus_);
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, diagnostics_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(2, trackingUrl_);
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeEnumSize(3, finalApplicationStatus_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }
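    // Both getSerializedSize() and hashCode() below memoize their result
    // (memoizedSize / memoizedHashCode); this is safe because the message is
    // immutable once built, and the sentinel values (-1 and 0) mark "not yet
    // computed".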

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterRequestProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterRequestProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterRequestProto) obj;

      if (hasDiagnostics() != other.hasDiagnostics()) return false;
      if (hasDiagnostics()) {
        if (!getDiagnostics()
            .equals(other.getDiagnostics())) return false;
      }
      if (hasTrackingUrl() != other.hasTrackingUrl()) return false;
      if (hasTrackingUrl()) {
        if (!getTrackingUrl()
            .equals(other.getTrackingUrl())) return false;
      }
      if (hasFinalApplicationStatus() != other.hasFinalApplicationStatus()) return false;
      if (hasFinalApplicationStatus()) {
        if (finalApplicationStatus_ != other.finalApplicationStatus_) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasDiagnostics()) {
        hash = (37 * hash) + DIAGNOSTICS_FIELD_NUMBER;
        hash = (53 * hash) + getDiagnostics().hashCode();
      }
      if (hasTrackingUrl()) {
        hash = (37 * hash) + TRACKING_URL_FIELD_NUMBER;
        hash = (53 * hash) + getTrackingUrl().hashCode();
      }
      if (hasFinalApplicationStatus()) {
        hash = (37 * hash) + FINAL_APPLICATION_STATUS_FIELD_NUMBER;
        hash = (53 * hash) + finalApplicationStatus_;
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterRequestProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterRequestProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterRequestProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterRequestProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterRequestProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterRequestProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterRequestProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterRequestProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }
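    // Illustrative serialize/parse round trip using the overloads above (a
    // sketch only; the diagnostics string is made up):
    //
    //   FinishApplicationMasterRequestProto req =
    //       FinishApplicationMasterRequestProto.newBuilder()
    //           .setDiagnostics("shut down cleanly")
    //           .build();
    //   byte[] wire = req.toByteArray();
    //   FinishApplicationMasterRequestProto copy =
    //       FinishApplicationMasterRequestProto.parseFrom(wire);
    //   assert copy.equals(req);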

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterRequestProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.FinishApplicationMasterRequestProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.FinishApplicationMasterRequestProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterRequestProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_FinishApplicationMasterRequestProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_FinishApplicationMasterRequestProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterRequestProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterRequestProto.newBuilder()
      private Builder() {

      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);

      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        diagnostics_ = "";
        trackingUrl_ = "";
        finalApplicationStatus_ = 0;
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_FinishApplicationMasterRequestProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterRequestProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterRequestProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterRequestProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterRequestProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterRequestProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterRequestProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterRequestProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterRequestProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.diagnostics_ = diagnostics_;
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.trackingUrl_ = trackingUrl_;
          to_bitField0_ |= 0x00000002;
        }
        if (((from_bitField0_ & 0x00000004) != 0)) {
          result.finalApplicationStatus_ = finalApplicationStatus_;
          to_bitField0_ |= 0x00000004;
        }
        result.bitField0_ |= to_bitField0_;
      }
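      // buildPartial0 copies each field whose builder-side presence bit is
      // set and ORs the same bits into the message's bitField0_, so the built
      // message's has*() methods mirror exactly what was set on this Builder.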

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterRequestProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterRequestProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterRequestProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterRequestProto.getDefaultInstance()) return this;
        if (other.hasDiagnostics()) {
          diagnostics_ = other.diagnostics_;
          bitField0_ |= 0x00000001;
          onChanged();
        }
        if (other.hasTrackingUrl()) {
          trackingUrl_ = other.trackingUrl_;
          bitField0_ |= 0x00000002;
          onChanged();
        }
        if (other.hasFinalApplicationStatus()) {
          setFinalApplicationStatus(other.getFinalApplicationStatus());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
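            // A tag encodes (fieldNumber << 3) | wireType:
            //   10 = (1 << 3) | 2 -> diagnostics, length-delimited
            //   18 = (2 << 3) | 2 -> tracking_url, length-delimited
            //   24 = (3 << 3) | 0 -> final_application_status, varint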
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                diagnostics_ = input.readBytes();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
              case 18: {
                trackingUrl_ = input.readBytes();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
              case 24: {
                int tmpRaw = input.readEnum();
                org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto tmpValue =
                    org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto.forNumber(tmpRaw);
                if (tmpValue == null) {
                  mergeUnknownVarintField(3, tmpRaw);
                } else {
                  finalApplicationStatus_ = tmpRaw;
                  bitField0_ |= 0x00000004;
                }
                break;
              } // case 24
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

      private java.lang.Object diagnostics_ = "";
      /**
       * <code>optional string diagnostics = 1;</code>
       * @return Whether the diagnostics field is set.
       */
      public boolean hasDiagnostics() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>optional string diagnostics = 1;</code>
       * @return The diagnostics.
       */
      public java.lang.String getDiagnostics() {
        java.lang.Object ref = diagnostics_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            diagnostics_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string diagnostics = 1;</code>
       * @return The bytes for diagnostics.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getDiagnosticsBytes() {
        java.lang.Object ref = diagnostics_;
        if (ref instanceof java.lang.String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          diagnostics_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string diagnostics = 1;</code>
       * @param value The diagnostics to set.
       * @return This builder for chaining.
       */
      public Builder setDiagnostics(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        diagnostics_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional string diagnostics = 1;</code>
       * @return This builder for chaining.
       */
      public Builder clearDiagnostics() {
        diagnostics_ = getDefaultInstance().getDiagnostics();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
        return this;
      }
      /**
       * <code>optional string diagnostics = 1;</code>
       * @param value The bytes for diagnostics to set.
       * @return This builder for chaining.
       */
      public Builder setDiagnosticsBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        diagnostics_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }

      private java.lang.Object trackingUrl_ = "";
      /**
       * <code>optional string tracking_url = 2;</code>
       * @return Whether the trackingUrl field is set.
       */
      public boolean hasTrackingUrl() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * <code>optional string tracking_url = 2;</code>
       * @return The trackingUrl.
       */
      public java.lang.String getTrackingUrl() {
        java.lang.Object ref = trackingUrl_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            trackingUrl_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string tracking_url = 2;</code>
       * @return The bytes for trackingUrl.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getTrackingUrlBytes() {
        java.lang.Object ref = trackingUrl_;
        if (ref instanceof java.lang.String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          trackingUrl_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string tracking_url = 2;</code>
       * @param value The trackingUrl to set.
       * @return This builder for chaining.
       */
      public Builder setTrackingUrl(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        trackingUrl_ = value;
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>optional string tracking_url = 2;</code>
       * @return This builder for chaining.
       */
      public Builder clearTrackingUrl() {
        trackingUrl_ = getDefaultInstance().getTrackingUrl();
        bitField0_ = (bitField0_ & ~0x00000002);
        onChanged();
        return this;
      }
      /**
       * <code>optional string tracking_url = 2;</code>
       * @param value The bytes for trackingUrl to set.
       * @return This builder for chaining.
       */
      public Builder setTrackingUrlBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        trackingUrl_ = value;
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }

      private int finalApplicationStatus_ = 0;
      /**
       * <code>optional .hadoop.yarn.FinalApplicationStatusProto final_application_status = 3;</code>
       * @return Whether the finalApplicationStatus field is set.
       */
      @java.lang.Override public boolean hasFinalApplicationStatus() {
        return ((bitField0_ & 0x00000004) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.FinalApplicationStatusProto final_application_status = 3;</code>
       * @return The finalApplicationStatus.
       */
      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto getFinalApplicationStatus() {
        org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto result = org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto.forNumber(finalApplicationStatus_);
        return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto.APP_UNDEFINED : result;
      }
      /**
       * <code>optional .hadoop.yarn.FinalApplicationStatusProto final_application_status = 3;</code>
       * @param value The finalApplicationStatus to set.
       * @return This builder for chaining.
       */
      public Builder setFinalApplicationStatus(org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000004;
        finalApplicationStatus_ = value.getNumber();
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.FinalApplicationStatusProto final_application_status = 3;</code>
       * @return This builder for chaining.
       */
      public Builder clearFinalApplicationStatus() {
        bitField0_ = (bitField0_ & ~0x00000004);
        finalApplicationStatus_ = 0;
        onChanged();
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.FinishApplicationMasterRequestProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.FinishApplicationMasterRequestProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterRequestProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterRequestProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterRequestProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<FinishApplicationMasterRequestProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<FinishApplicationMasterRequestProto>() {
      @java.lang.Override
      public FinishApplicationMasterRequestProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<FinishApplicationMasterRequestProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<FinishApplicationMasterRequestProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterRequestProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
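  // Illustrative builder usage for the message above (a sketch; APP_SUCCEEDED
  // is assumed to be one of the FinalApplicationStatusProto constants in
  // YarnProtos alongside the APP_UNDEFINED default used earlier):
  //
  //   FinishApplicationMasterRequestProto req =
  //       FinishApplicationMasterRequestProto.newBuilder()
  //           .setTrackingUrl("http://example.invalid/app") // hypothetical URL
  //           .setFinalApplicationStatus(
  //               YarnProtos.FinalApplicationStatusProto.APP_SUCCEEDED)
  //           .build();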

  public interface FinishApplicationMasterResponseProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.FinishApplicationMasterResponseProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>optional bool isUnregistered = 1 [default = false];</code>
     * @return Whether the isUnregistered field is set.
     */
    boolean hasIsUnregistered();
    /**
     * <code>optional bool isUnregistered = 1 [default = false];</code>
     * @return The isUnregistered.
     */
    boolean getIsUnregistered();
  }
  /**
   * Protobuf type {@code hadoop.yarn.FinishApplicationMasterResponseProto}
   */
  public static final class FinishApplicationMasterResponseProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.FinishApplicationMasterResponseProto)
      FinishApplicationMasterResponseProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use FinishApplicationMasterResponseProto.newBuilder() to construct.
    private FinishApplicationMasterResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private FinishApplicationMasterResponseProto() {
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new FinishApplicationMasterResponseProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_FinishApplicationMasterResponseProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_FinishApplicationMasterResponseProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterResponseProto.Builder.class);
    }

    private int bitField0_;
    public static final int ISUNREGISTERED_FIELD_NUMBER = 1;
    private boolean isUnregistered_ = false;
    /**
     * <code>optional bool isUnregistered = 1 [default = false];</code>
     * @return Whether the isUnregistered field is set.
     */
    @java.lang.Override
    public boolean hasIsUnregistered() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional bool isUnregistered = 1 [default = false];</code>
     * @return The isUnregistered.
     */
    @java.lang.Override
    public boolean getIsUnregistered() {
      return isUnregistered_;
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeBool(1, isUnregistered_);
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeBoolSize(1, isUnregistered_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }
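    // Editor's sketch, not protoc output: with the bool set, the wire cost is
    // exactly two bytes (tag byte 0x08 for field 1/varint, then one payload
    // byte); unset, the message serializes to zero bytes. The assertions are
    // an illustrative addition under those assumptions.
    private static void exampleSerializedSizeCheck() {
      assert getDefaultInstance().getSerializedSize() == 0;
      assert newBuilder().setIsUnregistered(true).build().getSerializedSize() == 2;
    }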

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterResponseProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterResponseProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterResponseProto) obj;

      if (hasIsUnregistered() != other.hasIsUnregistered()) return false;
      if (hasIsUnregistered()) {
        if (getIsUnregistered()
            != other.getIsUnregistered()) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasIsUnregistered()) {
        hash = (37 * hash) + ISUNREGISTERED_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashBoolean(
            getIsUnregistered());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterResponseProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterResponseProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterResponseProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterResponseProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterResponseProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterResponseProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterResponseProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterResponseProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }
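    // Editor's sketch, not protoc output: the delimited variants above prefix
    // each message with a varint length, so several messages can share one
    // stream. A minimal in-memory round trip using only java.io and the
    // generated API; the method itself is an illustrative addition.
    private static FinishApplicationMasterResponseProto exampleDelimitedRoundTrip()
        throws java.io.IOException {
      java.io.ByteArrayOutputStream out = new java.io.ByteArrayOutputStream();
      newBuilder().setIsUnregistered(true).build().writeDelimitedTo(out);
      newBuilder().setIsUnregistered(false).build().writeDelimitedTo(out);
      java.io.ByteArrayInputStream in =
          new java.io.ByteArrayInputStream(out.toByteArray());
      parseDelimitedFrom(in);          // first message
      return parseDelimitedFrom(in);   // second message
    }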

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterResponseProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.FinishApplicationMasterResponseProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.FinishApplicationMasterResponseProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterResponseProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_FinishApplicationMasterResponseProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_FinishApplicationMasterResponseProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterResponseProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterResponseProto.newBuilder()
      private Builder() {
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        isUnregistered_ = false;
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_FinishApplicationMasterResponseProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterResponseProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterResponseProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterResponseProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterResponseProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterResponseProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterResponseProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterResponseProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterResponseProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.isUnregistered_ = isUnregistered_;
          to_bitField0_ |= 0x00000001;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterResponseProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterResponseProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterResponseProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterResponseProto.getDefaultInstance()) return this;
        if (other.hasIsUnregistered()) {
          setIsUnregistered(other.getIsUnregistered());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 8: {
                isUnregistered_ = input.readBool();
                bitField0_ |= 0x00000001;
                break;
              } // case 8
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
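      // Editor's note, not protoc output: the case labels in mergeFrom above
      // are raw wire tags, computed as (field_number << 3) | wire_type. Case
      // 8 is therefore field 1 (isUnregistered) with wire type 0 (varint),
      // and case 0 marks end of input. Unrecognized tags fall through to
      // parseUnknownField, which keeps them for re-serialization.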
      private int bitField0_;

      private boolean isUnregistered_ ;
      /**
       * <code>optional bool isUnregistered = 1 [default = false];</code>
       * @return Whether the isUnregistered field is set.
       */
      @java.lang.Override
      public boolean hasIsUnregistered() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>optional bool isUnregistered = 1 [default = false];</code>
       * @return The isUnregistered.
       */
      @java.lang.Override
      public boolean getIsUnregistered() {
        return isUnregistered_;
      }
      /**
       * <code>optional bool isUnregistered = 1 [default = false];</code>
       * @param value The isUnregistered to set.
       * @return This builder for chaining.
       */
      public Builder setIsUnregistered(boolean value) {
        isUnregistered_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional bool isUnregistered = 1 [default = false];</code>
       * @return This builder for chaining.
       */
      public Builder clearIsUnregistered() {
        bitField0_ = (bitField0_ & ~0x00000001);
        isUnregistered_ = false;
        onChanged();
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.FinishApplicationMasterResponseProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.FinishApplicationMasterResponseProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterResponseProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterResponseProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterResponseProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<FinishApplicationMasterResponseProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<FinishApplicationMasterResponseProto>() {
      @java.lang.Override
      public FinishApplicationMasterResponseProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<FinishApplicationMasterResponseProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<FinishApplicationMasterResponseProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterResponseProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
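  // Editor's sketch, not protoc output: an end-to-end round trip through the
  // wire format for the response message defined above, using only the
  // generated API. The method itself is an illustrative addition.
  private static FinishApplicationMasterResponseProto exampleResponseRoundTrip()
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    FinishApplicationMasterResponseProto original =
        FinishApplicationMasterResponseProto.newBuilder()
            .setIsUnregistered(true)
            .build();
    byte[] wire = original.toByteArray(); // expected {0x08, 0x01}: field 1, varint 1
    FinishApplicationMasterResponseProto parsed =
        FinishApplicationMasterResponseProto.parseFrom(wire);
    assert parsed.equals(original) && parsed.hashCode() == original.hashCode();
    return parsed;
  }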

  public interface UpdateContainerRequestProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.UpdateContainerRequestProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>required int32 container_version = 1;</code>
     * @return Whether the containerVersion field is set.
     */
    boolean hasContainerVersion();
    /**
     * <code>required int32 container_version = 1;</code>
     * @return The containerVersion.
     */
    int getContainerVersion();

    /**
     * <code>required .hadoop.yarn.ContainerIdProto container_id = 2;</code>
     * @return Whether the containerId field is set.
     */
    boolean hasContainerId();
    /**
     * <code>required .hadoop.yarn.ContainerIdProto container_id = 2;</code>
     * @return The containerId.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getContainerId();
    /**
     * <code>required .hadoop.yarn.ContainerIdProto container_id = 2;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getContainerIdOrBuilder();

    /**
     * <code>required .hadoop.yarn.ContainerUpdateTypeProto update_type = 3;</code>
     * @return Whether the updateType field is set.
     */
    boolean hasUpdateType();
    /**
     * <code>required .hadoop.yarn.ContainerUpdateTypeProto update_type = 3;</code>
     * @return The updateType.
     */
    org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateTypeProto getUpdateType();

    /**
     * <code>optional .hadoop.yarn.ResourceProto capability = 4;</code>
     * @return Whether the capability field is set.
     */
    boolean hasCapability();
    /**
     * <code>optional .hadoop.yarn.ResourceProto capability = 4;</code>
     * @return The capability.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getCapability();
    /**
     * <code>optional .hadoop.yarn.ResourceProto capability = 4;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getCapabilityOrBuilder();

    /**
     * <code>optional .hadoop.yarn.ExecutionTypeProto execution_type = 5;</code>
     * @return Whether the executionType field is set.
     */
    boolean hasExecutionType();
    /**
     * <code>optional .hadoop.yarn.ExecutionTypeProto execution_type = 5;</code>
     * @return The executionType.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto getExecutionType();
  }
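  // Editor's note, not protoc output: unlike the response message above, this
  // request declares required fields (container_version, container_id,
  // update_type), so its Builder.build() rejects messages until all three are
  // present. capability and execution_type remain optional; execution_type
  // defaults to GUARANTEED (enum number 1).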
  /**
   * Protobuf type {@code hadoop.yarn.UpdateContainerRequestProto}
   */
  public static final class UpdateContainerRequestProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.UpdateContainerRequestProto)
      UpdateContainerRequestProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use UpdateContainerRequestProto.newBuilder() to construct.
    private UpdateContainerRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private UpdateContainerRequestProto() {
      updateType_ = 0;
      executionType_ = 1;
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new UpdateContainerRequestProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_UpdateContainerRequestProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_UpdateContainerRequestProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto.Builder.class);
    }

    private int bitField0_;
    public static final int CONTAINER_VERSION_FIELD_NUMBER = 1;
    private int containerVersion_ = 0;
    /**
     * <code>required int32 container_version = 1;</code>
     * @return Whether the containerVersion field is set.
     */
    @java.lang.Override
    public boolean hasContainerVersion() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>required int32 container_version = 1;</code>
     * @return The containerVersion.
     */
    @java.lang.Override
    public int getContainerVersion() {
      return containerVersion_;
    }

    public static final int CONTAINER_ID_FIELD_NUMBER = 2;
    private org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto containerId_;
    /**
     * <code>required .hadoop.yarn.ContainerIdProto container_id = 2;</code>
     * @return Whether the containerId field is set.
     */
    @java.lang.Override
    public boolean hasContainerId() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <code>required .hadoop.yarn.ContainerIdProto container_id = 2;</code>
     * @return The containerId.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getContainerId() {
      return containerId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : containerId_;
    }
    /**
     * <code>required .hadoop.yarn.ContainerIdProto container_id = 2;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getContainerIdOrBuilder() {
      return containerId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : containerId_;
    }

    public static final int UPDATE_TYPE_FIELD_NUMBER = 3;
    private int updateType_ = 0;
    /**
     * <code>required .hadoop.yarn.ContainerUpdateTypeProto update_type = 3;</code>
     * @return Whether the updateType field is set.
     */
    @java.lang.Override public boolean hasUpdateType() {
      return ((bitField0_ & 0x00000004) != 0);
    }
    /**
     * <code>required .hadoop.yarn.ContainerUpdateTypeProto update_type = 3;</code>
     * @return The updateType.
     */
    @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateTypeProto getUpdateType() {
      org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateTypeProto result = org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateTypeProto.forNumber(updateType_);
      return result == null ? org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateTypeProto.INCREASE_RESOURCE : result;
    }

    public static final int CAPABILITY_FIELD_NUMBER = 4;
    private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto capability_;
    /**
     * <code>optional .hadoop.yarn.ResourceProto capability = 4;</code>
     * @return Whether the capability field is set.
     */
    @java.lang.Override
    public boolean hasCapability() {
      return ((bitField0_ & 0x00000008) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.ResourceProto capability = 4;</code>
     * @return The capability.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getCapability() {
      return capability_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : capability_;
    }
    /**
     * <code>optional .hadoop.yarn.ResourceProto capability = 4;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getCapabilityOrBuilder() {
      return capability_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : capability_;
    }

    public static final int EXECUTION_TYPE_FIELD_NUMBER = 5;
    private int executionType_ = 1;
    /**
     * <code>optional .hadoop.yarn.ExecutionTypeProto execution_type = 5;</code>
     * @return Whether the executionType field is set.
     */
    @java.lang.Override public boolean hasExecutionType() {
      return ((bitField0_ & 0x00000010) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.ExecutionTypeProto execution_type = 5;</code>
     * @return The executionType.
     */
    @java.lang.Override public org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto getExecutionType() {
      org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto result = org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto.forNumber(executionType_);
      return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto.GUARANTEED : result;
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      if (!hasContainerVersion()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasContainerId()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasUpdateType()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (hasCapability()) {
        if (!getCapability().isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }
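    // Editor's sketch, not protoc output: build() enforces the required
    // fields checked above by throwing the unchecked
    // UninitializedMessageException; buildPartial() skips the check. Using
    // ContainerIdProto.getDefaultInstance() merely satisfies the presence
    // check here; a real caller would populate it. Illustrative addition.
    private static UpdateContainerRequestProto exampleBuildWithRequiredFields() {
      try {
        newBuilder().build(); // nothing set: all three required fields missing
      } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException expected) {
        // Expected: container_version, container_id, update_type absent.
      }
      return newBuilder()
          .setContainerVersion(0)
          .setContainerId(org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance())
          .setUpdateType(ContainerUpdateTypeProto.INCREASE_RESOURCE)
          .build();
    }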

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeInt32(1, containerVersion_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        output.writeMessage(2, getContainerId());
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        output.writeEnum(3, updateType_);
      }
      if (((bitField0_ & 0x00000008) != 0)) {
        output.writeMessage(4, getCapability());
      }
      if (((bitField0_ & 0x00000010) != 0)) {
        output.writeEnum(5, executionType_);
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt32Size(1, containerVersion_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(2, getContainerId());
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeEnumSize(3, updateType_);
      }
      if (((bitField0_ & 0x00000008) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(4, getCapability());
      }
      if (((bitField0_ & 0x00000010) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeEnumSize(5, executionType_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto) obj;

      if (hasContainerVersion() != other.hasContainerVersion()) return false;
      if (hasContainerVersion()) {
        if (getContainerVersion()
            != other.getContainerVersion()) return false;
      }
      if (hasContainerId() != other.hasContainerId()) return false;
      if (hasContainerId()) {
        if (!getContainerId()
            .equals(other.getContainerId())) return false;
      }
      if (hasUpdateType() != other.hasUpdateType()) return false;
      if (hasUpdateType()) {
        if (updateType_ != other.updateType_) return false;
      }
      if (hasCapability() != other.hasCapability()) return false;
      if (hasCapability()) {
        if (!getCapability()
            .equals(other.getCapability())) return false;
      }
      if (hasExecutionType() != other.hasExecutionType()) return false;
      if (hasExecutionType()) {
        if (executionType_ != other.executionType_) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasContainerVersion()) {
        hash = (37 * hash) + CONTAINER_VERSION_FIELD_NUMBER;
        hash = (53 * hash) + getContainerVersion();
      }
      if (hasContainerId()) {
        hash = (37 * hash) + CONTAINER_ID_FIELD_NUMBER;
        hash = (53 * hash) + getContainerId().hashCode();
      }
      if (hasUpdateType()) {
        hash = (37 * hash) + UPDATE_TYPE_FIELD_NUMBER;
        hash = (53 * hash) + updateType_;
      }
      if (hasCapability()) {
        hash = (37 * hash) + CAPABILITY_FIELD_NUMBER;
        hash = (53 * hash) + getCapability().hashCode();
      }
      if (hasExecutionType()) {
        hash = (37 * hash) + EXECUTION_TYPE_FIELD_NUMBER;
        hash = (53 * hash) + executionType_;
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.UpdateContainerRequestProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.UpdateContainerRequestProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_UpdateContainerRequestProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_UpdateContainerRequestProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
          getContainerIdFieldBuilder();
          getCapabilityFieldBuilder();
        }
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        containerVersion_ = 0;
        containerId_ = null;
        if (containerIdBuilder_ != null) {
          containerIdBuilder_.dispose();
          containerIdBuilder_ = null;
        }
        updateType_ = 0;
        capability_ = null;
        if (capabilityBuilder_ != null) {
          capabilityBuilder_.dispose();
          capabilityBuilder_ = null;
        }
        executionType_ = 1;
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_UpdateContainerRequestProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.containerVersion_ = containerVersion_;
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.containerId_ = containerIdBuilder_ == null
              ? containerId_
              : containerIdBuilder_.build();
          to_bitField0_ |= 0x00000002;
        }
        if (((from_bitField0_ & 0x00000004) != 0)) {
          result.updateType_ = updateType_;
          to_bitField0_ |= 0x00000004;
        }
        if (((from_bitField0_ & 0x00000008) != 0)) {
          result.capability_ = capabilityBuilder_ == null
              ? capability_
              : capabilityBuilder_.build();
          to_bitField0_ |= 0x00000008;
        }
        if (((from_bitField0_ & 0x00000010) != 0)) {
          result.executionType_ = executionType_;
          to_bitField0_ |= 0x00000010;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto.getDefaultInstance()) return this;
        if (other.hasContainerVersion()) {
          setContainerVersion(other.getContainerVersion());
        }
        if (other.hasContainerId()) {
          mergeContainerId(other.getContainerId());
        }
        if (other.hasUpdateType()) {
          setUpdateType(other.getUpdateType());
        }
        if (other.hasCapability()) {
          mergeCapability(other.getCapability());
        }
        if (other.hasExecutionType()) {
          setExecutionType(other.getExecutionType());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        if (!hasContainerVersion()) {
          return false;
        }
        if (!hasContainerId()) {
          return false;
        }
        if (!hasUpdateType()) {
          return false;
        }
        if (hasCapability()) {
          if (!getCapability().isInitialized()) {
            return false;
          }
        }
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 8: {
                containerVersion_ = input.readInt32();
                bitField0_ |= 0x00000001;
                break;
              } // case 8
              case 18: {
                input.readMessage(
                    getContainerIdFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
              case 24: {
                int tmpRaw = input.readEnum();
                org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateTypeProto tmpValue =
                    org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateTypeProto.forNumber(tmpRaw);
                if (tmpValue == null) {
                  mergeUnknownVarintField(3, tmpRaw);
                } else {
                  updateType_ = tmpRaw;
                  bitField0_ |= 0x00000004;
                }
                break;
              } // case 24
              case 34: {
                input.readMessage(
                    getCapabilityFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000008;
                break;
              } // case 34
              case 40: {
                int tmpRaw = input.readEnum();
                org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto tmpValue =
                    org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto.forNumber(tmpRaw);
                if (tmpValue == null) {
                  mergeUnknownVarintField(5, tmpRaw);
                } else {
                  executionType_ = tmpRaw;
                  bitField0_ |= 0x00000010;
                }
                break;
              } // case 40
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
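      // Editor's note, not protoc output: for the enum fields (cases 24 and
      // 40 above), an enum number this stub does not recognize is not a parse
      // error; mergeUnknownVarintField stores it in the unknown-field set, so
      // a value written by a newer peer survives a round trip through this
      // older binary.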
      private int bitField0_;

      private int containerVersion_ ;
      /**
       * <code>required int32 container_version = 1;</code>
       * @return Whether the containerVersion field is set.
       */
      @java.lang.Override
      public boolean hasContainerVersion() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>required int32 container_version = 1;</code>
       * @return The containerVersion.
       */
      @java.lang.Override
      public int getContainerVersion() {
        return containerVersion_;
      }
      /**
       * <code>required int32 container_version = 1;</code>
       * @param value The containerVersion to set.
       * @return This builder for chaining.
       */
      public Builder setContainerVersion(int value) {
        containerVersion_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>required int32 container_version = 1;</code>
       * @return This builder for chaining.
       */
      public Builder clearContainerVersion() {
        bitField0_ = (bitField0_ & ~0x00000001);
        containerVersion_ = 0;
        onChanged();
        return this;
      }

      private org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto containerId_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> containerIdBuilder_;
      /**
       * <code>required .hadoop.yarn.ContainerIdProto container_id = 2;</code>
       * @return Whether the containerId field is set.
       */
      public boolean hasContainerId() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * <code>required .hadoop.yarn.ContainerIdProto container_id = 2;</code>
       * @return The containerId.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getContainerId() {
        if (containerIdBuilder_ == null) {
          return containerId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : containerId_;
        } else {
          return containerIdBuilder_.getMessage();
        }
      }
      /**
       * <code>required .hadoop.yarn.ContainerIdProto container_id = 2;</code>
       */
      public Builder setContainerId(org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) {
        if (containerIdBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          containerId_ = value;
        } else {
          containerIdBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>required .hadoop.yarn.ContainerIdProto container_id = 2;</code>
       */
      public Builder setContainerId(
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder builderForValue) {
        if (containerIdBuilder_ == null) {
          containerId_ = builderForValue.build();
        } else {
          containerIdBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>required .hadoop.yarn.ContainerIdProto container_id = 2;</code>
       */
      public Builder mergeContainerId(org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) {
        if (containerIdBuilder_ == null) {
          if (((bitField0_ & 0x00000002) != 0) &&
            containerId_ != null &&
            containerId_ != org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance()) {
            getContainerIdBuilder().mergeFrom(value);
          } else {
            containerId_ = value;
          }
        } else {
          containerIdBuilder_.mergeFrom(value);
        }
        if (containerId_ != null) {
          bitField0_ |= 0x00000002;
          onChanged();
        }
        return this;
      }
      /**
       * <code>required .hadoop.yarn.ContainerIdProto container_id = 2;</code>
       */
      public Builder clearContainerId() {
        bitField0_ = (bitField0_ & ~0x00000002);
        containerId_ = null;
        if (containerIdBuilder_ != null) {
          containerIdBuilder_.dispose();
          containerIdBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>required .hadoop.yarn.ContainerIdProto container_id = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder getContainerIdBuilder() {
        bitField0_ |= 0x00000002;
        onChanged();
        return getContainerIdFieldBuilder().getBuilder();
      }
      /**
       * <code>required .hadoop.yarn.ContainerIdProto container_id = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getContainerIdOrBuilder() {
        if (containerIdBuilder_ != null) {
          return containerIdBuilder_.getMessageOrBuilder();
        } else {
          return containerId_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : containerId_;
        }
      }
      /**
       * <code>required .hadoop.yarn.ContainerIdProto container_id = 2;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> 
          getContainerIdFieldBuilder() {
        if (containerIdBuilder_ == null) {
          containerIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder>(
                  getContainerId(),
                  getParentForChildren(),
                  isClean());
          containerId_ = null;
        }
        return containerIdBuilder_;
      }
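      // Editor's note, not protoc output: the accessor above creates the
      // SingleFieldBuilderV3 lazily and nulls out containerId_, so the field
      // lives in exactly one place at a time: the plain message reference
      // before the first builder access, the nested builder afterwards. The
      // capability field below follows the same pattern.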

      private int updateType_ = 0;
      /**
       * <code>required .hadoop.yarn.ContainerUpdateTypeProto update_type = 3;</code>
       * @return Whether the updateType field is set.
       */
      @java.lang.Override public boolean hasUpdateType() {
        return ((bitField0_ & 0x00000004) != 0);
      }
      /**
       * <code>required .hadoop.yarn.ContainerUpdateTypeProto update_type = 3;</code>
       * @return The updateType.
       */
      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateTypeProto getUpdateType() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateTypeProto result = org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateTypeProto.forNumber(updateType_);
        return result == null ? org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateTypeProto.INCREASE_RESOURCE : result;
      }
      /**
       * <code>required .hadoop.yarn.ContainerUpdateTypeProto update_type = 3;</code>
       * @param value The updateType to set.
       * @return This builder for chaining.
       */
      public Builder setUpdateType(org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateTypeProto value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000004;
        updateType_ = value.getNumber();
        onChanged();
        return this;
      }
      /**
       * <code>required .hadoop.yarn.ContainerUpdateTypeProto update_type = 3;</code>
       * @return This builder for chaining.
       */
      public Builder clearUpdateType() {
        bitField0_ = (bitField0_ & ~0x00000004);
        updateType_ = 0;
        onChanged();
        return this;
      }

      private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto capability_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> capabilityBuilder_;
      /**
       * <code>optional .hadoop.yarn.ResourceProto capability = 4;</code>
       * @return Whether the capability field is set.
       */
      public boolean hasCapability() {
        return ((bitField0_ & 0x00000008) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto capability = 4;</code>
       * @return The capability.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getCapability() {
        if (capabilityBuilder_ == null) {
          return capability_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : capability_;
        } else {
          return capabilityBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto capability = 4;</code>
       */
      public Builder setCapability(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) {
        if (capabilityBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          capability_ = value;
        } else {
          capabilityBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000008;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto capability = 4;</code>
       */
      public Builder setCapability(
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder builderForValue) {
        if (capabilityBuilder_ == null) {
          capability_ = builderForValue.build();
        } else {
          capabilityBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000008;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto capability = 4;</code>
       */
      public Builder mergeCapability(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) {
        if (capabilityBuilder_ == null) {
          if (((bitField0_ & 0x00000008) != 0) &&
            capability_ != null &&
            capability_ != org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance()) {
            getCapabilityBuilder().mergeFrom(value);
          } else {
            capability_ = value;
          }
        } else {
          capabilityBuilder_.mergeFrom(value);
        }
        if (capability_ != null) {
          bitField0_ |= 0x00000008;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto capability = 4;</code>
       */
      public Builder clearCapability() {
        bitField0_ = (bitField0_ & ~0x00000008);
        capability_ = null;
        if (capabilityBuilder_ != null) {
          capabilityBuilder_.dispose();
          capabilityBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto capability = 4;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder getCapabilityBuilder() {
        bitField0_ |= 0x00000008;
        onChanged();
        return getCapabilityFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto capability = 4;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getCapabilityOrBuilder() {
        if (capabilityBuilder_ != null) {
          return capabilityBuilder_.getMessageOrBuilder();
        } else {
          return capability_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : capability_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto capability = 4;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> 
          getCapabilityFieldBuilder() {
        if (capabilityBuilder_ == null) {
          capabilityBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder>(
                  getCapability(),
                  getParentForChildren(),
                  isClean());
          capability_ = null;
        }
        return capabilityBuilder_;
      }
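      // Note on the pattern above: the SingleFieldBuilderV3 is created lazily on
      // first access. Until getCapabilityFieldBuilder() is called, the plain
      // capability_ message holds the value; afterwards the field builder owns
      // it (capability_ is nulled) and every accessor delegates to capabilityBuilder_.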

      private int executionType_ = 1;
      /**
       * <code>optional .hadoop.yarn.ExecutionTypeProto execution_type = 5;</code>
       * @return Whether the executionType field is set.
       */
      @java.lang.Override public boolean hasExecutionType() {
        return ((bitField0_ & 0x00000010) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.ExecutionTypeProto execution_type = 5;</code>
       * @return The executionType.
       */
      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto getExecutionType() {
        org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto result = org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto.forNumber(executionType_);
        return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto.GUARANTEED : result;
      }
      /**
       * <code>optional .hadoop.yarn.ExecutionTypeProto execution_type = 5;</code>
       * @param value The executionType to set.
       * @return This builder for chaining.
       */
      public Builder setExecutionType(org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000010;
        executionType_ = value.getNumber();
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ExecutionTypeProto execution_type = 5;</code>
       * @return This builder for chaining.
       */
      public Builder clearExecutionType() {
        bitField0_ = (bitField0_ & ~0x00000010);
        executionType_ = 1;
        onChanged();
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.UpdateContainerRequestProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.UpdateContainerRequestProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<UpdateContainerRequestProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<UpdateContainerRequestProto>() {
      @java.lang.Override
      public UpdateContainerRequestProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<UpdateContainerRequestProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<UpdateContainerRequestProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
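
  // Usage sketch (illustrative, not part of the generated API): building an
  // update request with the builder methods defined above. The containerId and
  // resource locals are assumed to be constructed elsewhere, and build() will
  // throw if any required field of the message is left unset.
  //
  //   YarnServiceProtos.UpdateContainerRequestProto request =
  //       YarnServiceProtos.UpdateContainerRequestProto.newBuilder()
  //           .setContainerId(containerId)   // required .hadoop.yarn.ContainerIdProto
  //           .setUpdateType(YarnServiceProtos.ContainerUpdateTypeProto.INCREASE_RESOURCE)
  //           .setCapability(resource)       // optional .hadoop.yarn.ResourceProto
  //           .build();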

  public interface UpdateContainerErrorProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.UpdateContainerErrorProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>optional string reason = 1;</code>
     * @return Whether the reason field is set.
     */
    boolean hasReason();
    /**
     * <code>optional string reason = 1;</code>
     * @return The reason.
     */
    java.lang.String getReason();
    /**
     * <code>optional string reason = 1;</code>
     * @return The bytes for reason.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getReasonBytes();

    /**
     * <code>optional .hadoop.yarn.UpdateContainerRequestProto update_request = 2;</code>
     * @return Whether the updateRequest field is set.
     */
    boolean hasUpdateRequest();
    /**
     * <code>optional .hadoop.yarn.UpdateContainerRequestProto update_request = 2;</code>
     * @return The updateRequest.
     */
    org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto getUpdateRequest();
    /**
     * <code>optional .hadoop.yarn.UpdateContainerRequestProto update_request = 2;</code>
     */
    org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProtoOrBuilder getUpdateRequestOrBuilder();

    /**
     * <code>optional int32 current_container_version = 3;</code>
     * @return Whether the currentContainerVersion field is set.
     */
    boolean hasCurrentContainerVersion();
    /**
     * <code>optional int32 current_container_version = 3;</code>
     * @return The currentContainerVersion.
     */
    int getCurrentContainerVersion();
  }
  /**
   * Protobuf type {@code hadoop.yarn.UpdateContainerErrorProto}
   */
  public static final class UpdateContainerErrorProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.UpdateContainerErrorProto)
      UpdateContainerErrorProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use UpdateContainerErrorProto.newBuilder() to construct.
    private UpdateContainerErrorProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private UpdateContainerErrorProto() {
      reason_ = "";
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new UpdateContainerErrorProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_UpdateContainerErrorProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_UpdateContainerErrorProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto.Builder.class);
    }

    private int bitField0_;
    public static final int REASON_FIELD_NUMBER = 1;
    @SuppressWarnings("serial")
    private volatile java.lang.Object reason_ = "";
    /**
     * <code>optional string reason = 1;</code>
     * @return Whether the reason field is set.
     */
    @java.lang.Override
    public boolean hasReason() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional string reason = 1;</code>
     * @return The reason.
     */
    @java.lang.Override
    public java.lang.String getReason() {
      java.lang.Object ref = reason_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          reason_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string reason = 1;</code>
     * @return The bytes for reason.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getReasonBytes() {
      java.lang.Object ref = reason_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        reason_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int UPDATE_REQUEST_FIELD_NUMBER = 2;
    private org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto updateRequest_;
    /**
     * <code>optional .hadoop.yarn.UpdateContainerRequestProto update_request = 2;</code>
     * @return Whether the updateRequest field is set.
     */
    @java.lang.Override
    public boolean hasUpdateRequest() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.UpdateContainerRequestProto update_request = 2;</code>
     * @return The updateRequest.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto getUpdateRequest() {
      return updateRequest_ == null ? org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto.getDefaultInstance() : updateRequest_;
    }
    /**
     * <code>optional .hadoop.yarn.UpdateContainerRequestProto update_request = 2;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProtoOrBuilder getUpdateRequestOrBuilder() {
      return updateRequest_ == null ? org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto.getDefaultInstance() : updateRequest_;
    }

    public static final int CURRENT_CONTAINER_VERSION_FIELD_NUMBER = 3;
    private int currentContainerVersion_ = 0;
    /**
     * <code>optional int32 current_container_version = 3;</code>
     * @return Whether the currentContainerVersion field is set.
     */
    @java.lang.Override
    public boolean hasCurrentContainerVersion() {
      return ((bitField0_ & 0x00000004) != 0);
    }
    /**
     * <code>optional int32 current_container_version = 3;</code>
     * @return The currentContainerVersion.
     */
    @java.lang.Override
    public int getCurrentContainerVersion() {
      return currentContainerVersion_;
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      if (hasUpdateRequest()) {
        if (!getUpdateRequest().isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, reason_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        output.writeMessage(2, getUpdateRequest());
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        output.writeInt32(3, currentContainerVersion_);
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, reason_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(2, getUpdateRequest());
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt32Size(3, currentContainerVersion_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto) obj;

      if (hasReason() != other.hasReason()) return false;
      if (hasReason()) {
        if (!getReason()
            .equals(other.getReason())) return false;
      }
      if (hasUpdateRequest() != other.hasUpdateRequest()) return false;
      if (hasUpdateRequest()) {
        if (!getUpdateRequest()
            .equals(other.getUpdateRequest())) return false;
      }
      if (hasCurrentContainerVersion() != other.hasCurrentContainerVersion()) return false;
      if (hasCurrentContainerVersion()) {
        if (getCurrentContainerVersion()
            != other.getCurrentContainerVersion()) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasReason()) {
        hash = (37 * hash) + REASON_FIELD_NUMBER;
        hash = (53 * hash) + getReason().hashCode();
      }
      if (hasUpdateRequest()) {
        hash = (37 * hash) + UPDATE_REQUEST_FIELD_NUMBER;
        hash = (53 * hash) + getUpdateRequest().hashCode();
      }
      if (hasCurrentContainerVersion()) {
        hash = (37 * hash) + CURRENT_CONTAINER_VERSION_FIELD_NUMBER;
        hash = (53 * hash) + getCurrentContainerVersion();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.UpdateContainerErrorProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.UpdateContainerErrorProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_UpdateContainerErrorProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_UpdateContainerErrorProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
          getUpdateRequestFieldBuilder();
        }
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        reason_ = "";
        updateRequest_ = null;
        if (updateRequestBuilder_ != null) {
          updateRequestBuilder_.dispose();
          updateRequestBuilder_ = null;
        }
        currentContainerVersion_ = 0;
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_UpdateContainerErrorProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.reason_ = reason_;
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.updateRequest_ = updateRequestBuilder_ == null
              ? updateRequest_
              : updateRequestBuilder_.build();
          to_bitField0_ |= 0x00000002;
        }
        if (((from_bitField0_ & 0x00000004) != 0)) {
          result.currentContainerVersion_ = currentContainerVersion_;
          to_bitField0_ |= 0x00000004;
        }
        result.bitField0_ |= to_bitField0_;
      }
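      // buildPartial0 copies each locally set field into the result message and
      // rebuilds the result's has-bits from scratch, so the built message reports
      // exactly the fields that were explicitly set on this builder.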

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto.getDefaultInstance()) return this;
        if (other.hasReason()) {
          reason_ = other.reason_;
          bitField0_ |= 0x00000001;
          onChanged();
        }
        if (other.hasUpdateRequest()) {
          mergeUpdateRequest(other.getUpdateRequest());
        }
        if (other.hasCurrentContainerVersion()) {
          setCurrentContainerVersion(other.getCurrentContainerVersion());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        if (hasUpdateRequest()) {
          if (!getUpdateRequest().isInitialized()) {
            return false;
          }
        }
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                reason_ = input.readBytes();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
              case 18: {
                input.readMessage(
                    getUpdateRequestFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
              case 24: {
                currentContainerVersion_ = input.readInt32();
                bitField0_ |= 0x00000004;
                break;
              } // case 24
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
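      // The case labels in the switch above are protobuf wire tags,
      // (field_number << 3) | wire_type: 10 = field 1, length-delimited (reason);
      // 18 = field 2, length-delimited (update_request); 24 = field 3, varint
      // (current_container_version).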
      private int bitField0_;

      private java.lang.Object reason_ = "";
      /**
       * <code>optional string reason = 1;</code>
       * @return Whether the reason field is set.
       */
      public boolean hasReason() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>optional string reason = 1;</code>
       * @return The reason.
       */
      public java.lang.String getReason() {
        java.lang.Object ref = reason_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            reason_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string reason = 1;</code>
       * @return The bytes for reason.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getReasonBytes() {
        java.lang.Object ref = reason_;
        if (ref instanceof java.lang.String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          reason_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string reason = 1;</code>
       * @param value The reason to set.
       * @return This builder for chaining.
       */
      public Builder setReason(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        reason_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional string reason = 1;</code>
       * @return This builder for chaining.
       */
      public Builder clearReason() {
        reason_ = getDefaultInstance().getReason();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
        return this;
      }
      /**
       * <code>optional string reason = 1;</code>
       * @param value The bytes for reason to set.
       * @return This builder for chaining.
       */
      public Builder setReasonBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        reason_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }

      private org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto updateRequest_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto, org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto.Builder, org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProtoOrBuilder> updateRequestBuilder_;
      /**
       * <code>optional .hadoop.yarn.UpdateContainerRequestProto update_request = 2;</code>
       * @return Whether the updateRequest field is set.
       */
      public boolean hasUpdateRequest() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.UpdateContainerRequestProto update_request = 2;</code>
       * @return The updateRequest.
       */
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto getUpdateRequest() {
        if (updateRequestBuilder_ == null) {
          return updateRequest_ == null ? org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto.getDefaultInstance() : updateRequest_;
        } else {
          return updateRequestBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.UpdateContainerRequestProto update_request = 2;</code>
       */
      public Builder setUpdateRequest(org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto value) {
        if (updateRequestBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          updateRequest_ = value;
        } else {
          updateRequestBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.UpdateContainerRequestProto update_request = 2;</code>
       */
      public Builder setUpdateRequest(
          org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto.Builder builderForValue) {
        if (updateRequestBuilder_ == null) {
          updateRequest_ = builderForValue.build();
        } else {
          updateRequestBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.UpdateContainerRequestProto update_request = 2;</code>
       */
      public Builder mergeUpdateRequest(org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto value) {
        if (updateRequestBuilder_ == null) {
          if (((bitField0_ & 0x00000002) != 0) &&
            updateRequest_ != null &&
            updateRequest_ != org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto.getDefaultInstance()) {
            getUpdateRequestBuilder().mergeFrom(value);
          } else {
            updateRequest_ = value;
          }
        } else {
          updateRequestBuilder_.mergeFrom(value);
        }
        if (updateRequest_ != null) {
          bitField0_ |= 0x00000002;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.UpdateContainerRequestProto update_request = 2;</code>
       */
      public Builder clearUpdateRequest() {
        bitField0_ = (bitField0_ & ~0x00000002);
        updateRequest_ = null;
        if (updateRequestBuilder_ != null) {
          updateRequestBuilder_.dispose();
          updateRequestBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.UpdateContainerRequestProto update_request = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto.Builder getUpdateRequestBuilder() {
        bitField0_ |= 0x00000002;
        onChanged();
        return getUpdateRequestFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.UpdateContainerRequestProto update_request = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProtoOrBuilder getUpdateRequestOrBuilder() {
        if (updateRequestBuilder_ != null) {
          return updateRequestBuilder_.getMessageOrBuilder();
        } else {
          return updateRequest_ == null ?
              org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto.getDefaultInstance() : updateRequest_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.UpdateContainerRequestProto update_request = 2;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto, org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto.Builder, org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProtoOrBuilder> 
          getUpdateRequestFieldBuilder() {
        if (updateRequestBuilder_ == null) {
          updateRequestBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto, org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto.Builder, org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProtoOrBuilder>(
                  getUpdateRequest(),
                  getParentForChildren(),
                  isClean());
          updateRequest_ = null;
        }
        return updateRequestBuilder_;
      }

      private int currentContainerVersion_;
      /**
       * <code>optional int32 current_container_version = 3;</code>
       * @return Whether the currentContainerVersion field is set.
       */
      @java.lang.Override
      public boolean hasCurrentContainerVersion() {
        return ((bitField0_ & 0x00000004) != 0);
      }
      /**
       * <code>optional int32 current_container_version = 3;</code>
       * @return The currentContainerVersion.
       */
      @java.lang.Override
      public int getCurrentContainerVersion() {
        return currentContainerVersion_;
      }
      /**
       * <code>optional int32 current_container_version = 3;</code>
       * @param value The currentContainerVersion to set.
       * @return This builder for chaining.
       */
      public Builder setCurrentContainerVersion(int value) {
        currentContainerVersion_ = value;
        bitField0_ |= 0x00000004;
        onChanged();
        return this;
      }
      /**
       * <code>optional int32 current_container_version = 3;</code>
       * @return This builder for chaining.
       */
      public Builder clearCurrentContainerVersion() {
        bitField0_ = (bitField0_ & ~0x00000004);
        currentContainerVersion_ = 0;
        onChanged();
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.UpdateContainerErrorProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.UpdateContainerErrorProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<UpdateContainerErrorProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<UpdateContainerErrorProto>() {
      @java.lang.Override
      public UpdateContainerErrorProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<UpdateContainerErrorProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<UpdateContainerErrorProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
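
  // Usage sketch (illustrative, not part of the generated API): a round trip
  // through the serialized form using the parseFrom overloads defined above.
  // The reason string and version number are hypothetical values.
  //
  //   YarnServiceProtos.UpdateContainerErrorProto error =
  //       YarnServiceProtos.UpdateContainerErrorProto.newBuilder()
  //           .setReason("container version mismatch")
  //           .setCurrentContainerVersion(3)
  //           .build();
  //   byte[] bytes = error.toByteArray();
  //   YarnServiceProtos.UpdateContainerErrorProto parsed =
  //       YarnServiceProtos.UpdateContainerErrorProto.parseFrom(bytes);
  //   assert parsed.equals(error);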

  public interface AllocateRequestProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.AllocateRequestProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>repeated .hadoop.yarn.ResourceRequestProto ask = 1;</code>
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto> 
        getAskList();
    /**
     * <code>repeated .hadoop.yarn.ResourceRequestProto ask = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto getAsk(int index);
    /**
     * <code>repeated .hadoop.yarn.ResourceRequestProto ask = 1;</code>
     */
    int getAskCount();
    /**
     * <code>repeated .hadoop.yarn.ResourceRequestProto ask = 1;</code>
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProtoOrBuilder> 
        getAskOrBuilderList();
    /**
     * <code>repeated .hadoop.yarn.ResourceRequestProto ask = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProtoOrBuilder getAskOrBuilder(
        int index);

    /**
     * <code>repeated .hadoop.yarn.ContainerIdProto release = 2;</code>
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto> 
        getReleaseList();
    /**
     * <code>repeated .hadoop.yarn.ContainerIdProto release = 2;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getRelease(int index);
    /**
     * <code>repeated .hadoop.yarn.ContainerIdProto release = 2;</code>
     */
    int getReleaseCount();
    /**
     * <code>repeated .hadoop.yarn.ContainerIdProto release = 2;</code>
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> 
        getReleaseOrBuilderList();
    /**
     * <code>repeated .hadoop.yarn.ContainerIdProto release = 2;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getReleaseOrBuilder(
        int index);

    /**
     * <code>optional .hadoop.yarn.ResourceBlacklistRequestProto blacklist_request = 3;</code>
     * @return Whether the blacklistRequest field is set.
     */
    boolean hasBlacklistRequest();
    /**
     * <code>optional .hadoop.yarn.ResourceBlacklistRequestProto blacklist_request = 3;</code>
     * @return The blacklistRequest.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto getBlacklistRequest();
    /**
     * <code>optional .hadoop.yarn.ResourceBlacklistRequestProto blacklist_request = 3;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProtoOrBuilder getBlacklistRequestOrBuilder();

    /**
     * <code>optional int32 response_id = 4;</code>
     * @return Whether the responseId field is set.
     */
    boolean hasResponseId();
    /**
     * <code>optional int32 response_id = 4;</code>
     * @return The responseId.
     */
    int getResponseId();

    /**
     * <code>optional float progress = 5;</code>
     * @return Whether the progress field is set.
     */
    boolean hasProgress();
    /**
     * <code>optional float progress = 5;</code>
     * @return The progress.
     */
    float getProgress();

    /**
     * <code>repeated .hadoop.yarn.UpdateContainerRequestProto update_requests = 7;</code>
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto> 
        getUpdateRequestsList();
    /**
     * <code>repeated .hadoop.yarn.UpdateContainerRequestProto update_requests = 7;</code>
     */
    org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto getUpdateRequests(int index);
    /**
     * <code>repeated .hadoop.yarn.UpdateContainerRequestProto update_requests = 7;</code>
     */
    int getUpdateRequestsCount();
    /**
     * <code>repeated .hadoop.yarn.UpdateContainerRequestProto update_requests = 7;</code>
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProtoOrBuilder> 
        getUpdateRequestsOrBuilderList();
    /**
     * <code>repeated .hadoop.yarn.UpdateContainerRequestProto update_requests = 7;</code>
     */
    org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProtoOrBuilder getUpdateRequestsOrBuilder(
        int index);

    /**
     * <code>repeated .hadoop.yarn.SchedulingRequestProto scheduling_requests = 10;</code>
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto> 
        getSchedulingRequestsList();
    /**
     * <code>repeated .hadoop.yarn.SchedulingRequestProto scheduling_requests = 10;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto getSchedulingRequests(int index);
    /**
     * <code>repeated .hadoop.yarn.SchedulingRequestProto scheduling_requests = 10;</code>
     */
    int getSchedulingRequestsCount();
    /**
     * <code>repeated .hadoop.yarn.SchedulingRequestProto scheduling_requests = 10;</code>
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProtoOrBuilder> 
        getSchedulingRequestsOrBuilderList();
    /**
     * <code>repeated .hadoop.yarn.SchedulingRequestProto scheduling_requests = 10;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProtoOrBuilder getSchedulingRequestsOrBuilder(
        int index);

    /**
     * <code>optional string tracking_url = 11;</code>
     * @return Whether the trackingUrl field is set.
     */
    boolean hasTrackingUrl();
    /**
     * <code>optional string tracking_url = 11;</code>
     * @return The trackingUrl.
     */
    java.lang.String getTrackingUrl();
    /**
     * <code>optional string tracking_url = 11;</code>
     * @return The bytes for trackingUrl.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getTrackingUrlBytes();
  }
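
  // Usage sketch (illustrative): every repeated field in the interface above is
  // exposed both as an immutable list and through indexed accessors, so the two
  // loops below over a hypothetical AllocateRequestProto `request` read the
  // same data.
  //
  //   for (YarnProtos.ResourceRequestProto ask : request.getAskList()) {
  //     // inspect each resource request
  //   }
  //   for (int i = 0; i < request.getAskCount(); i++) {
  //     YarnProtos.ResourceRequestProto ask = request.getAsk(i);
  //   }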
  /**
   * Protobuf type {@code hadoop.yarn.AllocateRequestProto}
   */
  public static final class AllocateRequestProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.AllocateRequestProto)
      AllocateRequestProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use AllocateRequestProto.newBuilder() to construct.
    private AllocateRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private AllocateRequestProto() {
      ask_ = java.util.Collections.emptyList();
      release_ = java.util.Collections.emptyList();
      updateRequests_ = java.util.Collections.emptyList();
      schedulingRequests_ = java.util.Collections.emptyList();
      trackingUrl_ = "";
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new AllocateRequestProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_AllocateRequestProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_AllocateRequestProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto.Builder.class);
    }

    private int bitField0_;
    public static final int ASK_FIELD_NUMBER = 1;
    @SuppressWarnings("serial")
    private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto> ask_;
    /**
     * <code>repeated .hadoop.yarn.ResourceRequestProto ask = 1;</code>
     */
    @java.lang.Override
    public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto> getAskList() {
      return ask_;
    }
    /**
     * <code>repeated .hadoop.yarn.ResourceRequestProto ask = 1;</code>
     */
    @java.lang.Override
    public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProtoOrBuilder> 
        getAskOrBuilderList() {
      return ask_;
    }
    /**
     * <code>repeated .hadoop.yarn.ResourceRequestProto ask = 1;</code>
     */
    @java.lang.Override
    public int getAskCount() {
      return ask_.size();
    }
    /**
     * <code>repeated .hadoop.yarn.ResourceRequestProto ask = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto getAsk(int index) {
      return ask_.get(index);
    }
    /**
     * <code>repeated .hadoop.yarn.ResourceRequestProto ask = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProtoOrBuilder getAskOrBuilder(
        int index) {
      return ask_.get(index);
    }

    public static final int RELEASE_FIELD_NUMBER = 2;
    @SuppressWarnings("serial")
    private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto> release_;
    /**
     * <code>repeated .hadoop.yarn.ContainerIdProto release = 2;</code>
     */
    @java.lang.Override
    public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto> getReleaseList() {
      return release_;
    }
    /**
     * <code>repeated .hadoop.yarn.ContainerIdProto release = 2;</code>
     */
    @java.lang.Override
    public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> 
        getReleaseOrBuilderList() {
      return release_;
    }
    /**
     * <code>repeated .hadoop.yarn.ContainerIdProto release = 2;</code>
     */
    @java.lang.Override
    public int getReleaseCount() {
      return release_.size();
    }
    /**
     * <code>repeated .hadoop.yarn.ContainerIdProto release = 2;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getRelease(int index) {
      return release_.get(index);
    }
    /**
     * <code>repeated .hadoop.yarn.ContainerIdProto release = 2;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getReleaseOrBuilder(
        int index) {
      return release_.get(index);
    }

    public static final int BLACKLIST_REQUEST_FIELD_NUMBER = 3;
    private org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto blacklistRequest_;
    /**
     * <code>optional .hadoop.yarn.ResourceBlacklistRequestProto blacklist_request = 3;</code>
     * @return Whether the blacklistRequest field is set.
     */
    @java.lang.Override
    public boolean hasBlacklistRequest() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.ResourceBlacklistRequestProto blacklist_request = 3;</code>
     * @return The blacklistRequest.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto getBlacklistRequest() {
      return blacklistRequest_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto.getDefaultInstance() : blacklistRequest_;
    }
    /**
     * <code>optional .hadoop.yarn.ResourceBlacklistRequestProto blacklist_request = 3;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProtoOrBuilder getBlacklistRequestOrBuilder() {
      return blacklistRequest_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto.getDefaultInstance() : blacklistRequest_;
    }

    public static final int RESPONSE_ID_FIELD_NUMBER = 4;
    private int responseId_ = 0;
    /**
     * <code>optional int32 response_id = 4;</code>
     * @return Whether the responseId field is set.
     */
    @java.lang.Override
    public boolean hasResponseId() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <code>optional int32 response_id = 4;</code>
     * @return The responseId.
     */
    @java.lang.Override
    public int getResponseId() {
      return responseId_;
    }

    public static final int PROGRESS_FIELD_NUMBER = 5;
    private float progress_ = 0F;
    /**
     * <code>optional float progress = 5;</code>
     * @return Whether the progress field is set.
     */
    @java.lang.Override
    public boolean hasProgress() {
      return ((bitField0_ & 0x00000004) != 0);
    }
    /**
     * <code>optional float progress = 5;</code>
     * @return The progress.
     */
    @java.lang.Override
    public float getProgress() {
      return progress_;
    }

    public static final int UPDATE_REQUESTS_FIELD_NUMBER = 7;
    @SuppressWarnings("serial")
    private java.util.List<org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto> updateRequests_;
    /**
     * <code>repeated .hadoop.yarn.UpdateContainerRequestProto update_requests = 7;</code>
     */
    @java.lang.Override
    public java.util.List<org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto> getUpdateRequestsList() {
      return updateRequests_;
    }
    /**
     * <code>repeated .hadoop.yarn.UpdateContainerRequestProto update_requests = 7;</code>
     */
    @java.lang.Override
    public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProtoOrBuilder> 
        getUpdateRequestsOrBuilderList() {
      return updateRequests_;
    }
    /**
     * <code>repeated .hadoop.yarn.UpdateContainerRequestProto update_requests = 7;</code>
     */
    @java.lang.Override
    public int getUpdateRequestsCount() {
      return updateRequests_.size();
    }
    /**
     * <code>repeated .hadoop.yarn.UpdateContainerRequestProto update_requests = 7;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto getUpdateRequests(int index) {
      return updateRequests_.get(index);
    }
    /**
     * <code>repeated .hadoop.yarn.UpdateContainerRequestProto update_requests = 7;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProtoOrBuilder getUpdateRequestsOrBuilder(
        int index) {
      return updateRequests_.get(index);
    }

    public static final int SCHEDULING_REQUESTS_FIELD_NUMBER = 10;
    @SuppressWarnings("serial")
    private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto> schedulingRequests_;
    /**
     * <code>repeated .hadoop.yarn.SchedulingRequestProto scheduling_requests = 10;</code>
     */
    @java.lang.Override
    public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto> getSchedulingRequestsList() {
      return schedulingRequests_;
    }
    /**
     * <code>repeated .hadoop.yarn.SchedulingRequestProto scheduling_requests = 10;</code>
     */
    @java.lang.Override
    public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProtoOrBuilder> 
        getSchedulingRequestsOrBuilderList() {
      return schedulingRequests_;
    }
    /**
     * <code>repeated .hadoop.yarn.SchedulingRequestProto scheduling_requests = 10;</code>
     */
    @java.lang.Override
    public int getSchedulingRequestsCount() {
      return schedulingRequests_.size();
    }
    /**
     * <code>repeated .hadoop.yarn.SchedulingRequestProto scheduling_requests = 10;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto getSchedulingRequests(int index) {
      return schedulingRequests_.get(index);
    }
    /**
     * <code>repeated .hadoop.yarn.SchedulingRequestProto scheduling_requests = 10;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProtoOrBuilder getSchedulingRequestsOrBuilder(
        int index) {
      return schedulingRequests_.get(index);
    }

    public static final int TRACKING_URL_FIELD_NUMBER = 11;
    @SuppressWarnings("serial")
    private volatile java.lang.Object trackingUrl_ = "";
    /**
     * <code>optional string tracking_url = 11;</code>
     * @return Whether the trackingUrl field is set.
     */
    @java.lang.Override
    public boolean hasTrackingUrl() {
      return ((bitField0_ & 0x00000008) != 0);
    }
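    // trackingUrl_ holds either a String or a ByteString. getTrackingUrl()
    // decodes lazily and caches the String only when the bytes are valid UTF-8;
    // getTrackingUrlBytes() performs the reverse conversion and caches the
    // ByteString, so repeated calls in either direction are cheap.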
    /**
     * <code>optional string tracking_url = 11;</code>
     * @return The trackingUrl.
     */
    @java.lang.Override
    public java.lang.String getTrackingUrl() {
      java.lang.Object ref = trackingUrl_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          trackingUrl_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string tracking_url = 11;</code>
     * @return The bytes for trackingUrl.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getTrackingUrlBytes() {
      java.lang.Object ref = trackingUrl_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        trackingUrl_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    private byte memoizedIsInitialized = -1;
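    // memoizedIsInitialized caches the result of isInitialized():
    // -1 = not yet computed, 0 = known uninitialized, 1 = known initialized.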
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      for (int i = 0; i < getAskCount(); i++) {
        if (!getAsk(i).isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      for (int i = 0; i < getUpdateRequestsCount(); i++) {
        if (!getUpdateRequests(i).isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      for (int i = 0; i < getSchedulingRequestsCount(); i++) {
        if (!getSchedulingRequests(i).isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
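      // Fields are written in ascending field-number order; note the gaps at
      // field numbers 6, 8 and 9, which are not used by this message.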
      for (int i = 0; i < ask_.size(); i++) {
        output.writeMessage(1, ask_.get(i));
      }
      for (int i = 0; i < release_.size(); i++) {
        output.writeMessage(2, release_.get(i));
      }
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeMessage(3, getBlacklistRequest());
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        output.writeInt32(4, responseId_);
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        output.writeFloat(5, progress_);
      }
      for (int i = 0; i < updateRequests_.size(); i++) {
        output.writeMessage(7, updateRequests_.get(i));
      }
      for (int i = 0; i < schedulingRequests_.size(); i++) {
        output.writeMessage(10, schedulingRequests_.get(i));
      }
      if (((bitField0_ & 0x00000008) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 11, trackingUrl_);
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
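      // The size is computed once and cached in the inherited memoizedSize
      // field; -1 means it has not been computed yet.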
      if (size != -1) return size;

      size = 0;
      for (int i = 0; i < ask_.size(); i++) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, ask_.get(i));
      }
      for (int i = 0; i < release_.size(); i++) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(2, release_.get(i));
      }
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(3, getBlacklistRequest());
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt32Size(4, responseId_);
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeFloatSize(5, progress_);
      }
      for (int i = 0; i < updateRequests_.size(); i++) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(7, updateRequests_.get(i));
      }
      for (int i = 0; i < schedulingRequests_.size(); i++) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(10, schedulingRequests_.get(i));
      }
      if (((bitField0_ & 0x00000008) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(11, trackingUrl_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto) obj;

      if (!getAskList()
          .equals(other.getAskList())) return false;
      if (!getReleaseList()
          .equals(other.getReleaseList())) return false;
      if (hasBlacklistRequest() != other.hasBlacklistRequest()) return false;
      if (hasBlacklistRequest()) {
        if (!getBlacklistRequest()
            .equals(other.getBlacklistRequest())) return false;
      }
      if (hasResponseId() != other.hasResponseId()) return false;
      if (hasResponseId()) {
        if (getResponseId()
            != other.getResponseId()) return false;
      }
      if (hasProgress() != other.hasProgress()) return false;
      if (hasProgress()) {
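        // Compare floats by raw bits: NaN compares equal to NaN and
        // +0.0f differs from -0.0f, unlike the == operator.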
        if (java.lang.Float.floatToIntBits(getProgress())
            != java.lang.Float.floatToIntBits(
                other.getProgress())) return false;
      }
      if (!getUpdateRequestsList()
          .equals(other.getUpdateRequestsList())) return false;
      if (!getSchedulingRequestsList()
          .equals(other.getSchedulingRequestsList())) return false;
      if (hasTrackingUrl() != other.hasTrackingUrl()) return false;
      if (hasTrackingUrl()) {
        if (!getTrackingUrl()
            .equals(other.getTrackingUrl())) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
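      // The hash mixes the descriptor hash with each present field's hash
      // using prime multipliers, and is cached in memoizedHashCode so later
      // calls return immediately.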
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (getAskCount() > 0) {
        hash = (37 * hash) + ASK_FIELD_NUMBER;
        hash = (53 * hash) + getAskList().hashCode();
      }
      if (getReleaseCount() > 0) {
        hash = (37 * hash) + RELEASE_FIELD_NUMBER;
        hash = (53 * hash) + getReleaseList().hashCode();
      }
      if (hasBlacklistRequest()) {
        hash = (37 * hash) + BLACKLIST_REQUEST_FIELD_NUMBER;
        hash = (53 * hash) + getBlacklistRequest().hashCode();
      }
      if (hasResponseId()) {
        hash = (37 * hash) + RESPONSE_ID_FIELD_NUMBER;
        hash = (53 * hash) + getResponseId();
      }
      if (hasProgress()) {
        hash = (37 * hash) + PROGRESS_FIELD_NUMBER;
        hash = (53 * hash) + java.lang.Float.floatToIntBits(
            getProgress());
      }
      if (getUpdateRequestsCount() > 0) {
        hash = (37 * hash) + UPDATE_REQUESTS_FIELD_NUMBER;
        hash = (53 * hash) + getUpdateRequestsList().hashCode();
      }
      if (getSchedulingRequestsCount() > 0) {
        hash = (37 * hash) + SCHEDULING_REQUESTS_FIELD_NUMBER;
        hash = (53 * hash) + getSchedulingRequestsList().hashCode();
      }
      if (hasTrackingUrl()) {
        hash = (37 * hash) + TRACKING_URL_FIELD_NUMBER;
        hash = (53 * hash) + getTrackingUrl().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }
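    /*
     * A minimal usage sketch (illustrative only; not part of the generated
     * file): build an AllocateRequestProto, serialize it, and parse it back
     * using the parseFrom overloads above.
     *
     *   AllocateRequestProto request = AllocateRequestProto.newBuilder()
     *       .setResponseId(1)
     *       .setProgress(0.5f)
     *       .build();
     *   byte[] bytes = request.toByteArray();
     *   AllocateRequestProto roundTripped = AllocateRequestProto.parseFrom(bytes);
     */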

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.AllocateRequestProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.AllocateRequestProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_AllocateRequestProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_AllocateRequestProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
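      // alwaysUseFieldBuilders is typically only enabled in protobuf's own
      // tests; when set, the nested field builders below are created eagerly
      // instead of lazily on first access.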
      private void maybeForceBuilderInitialization() {
        if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
          getAskFieldBuilder();
          getReleaseFieldBuilder();
          getBlacklistRequestFieldBuilder();
          getUpdateRequestsFieldBuilder();
          getSchedulingRequestsFieldBuilder();
        }
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        if (askBuilder_ == null) {
          ask_ = java.util.Collections.emptyList();
        } else {
          ask_ = null;
          askBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        if (releaseBuilder_ == null) {
          release_ = java.util.Collections.emptyList();
        } else {
          release_ = null;
          releaseBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000002);
        blacklistRequest_ = null;
        if (blacklistRequestBuilder_ != null) {
          blacklistRequestBuilder_.dispose();
          blacklistRequestBuilder_ = null;
        }
        responseId_ = 0;
        progress_ = 0F;
        if (updateRequestsBuilder_ == null) {
          updateRequests_ = java.util.Collections.emptyList();
        } else {
          updateRequests_ = null;
          updateRequestsBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000020);
        if (schedulingRequestsBuilder_ == null) {
          schedulingRequests_ = java.util.Collections.emptyList();
        } else {
          schedulingRequests_ = null;
          schedulingRequestsBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000040);
        trackingUrl_ = "";
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_AllocateRequestProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto(this);
        buildPartialRepeatedFields(result);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartialRepeatedFields(org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto result) {
        if (askBuilder_ == null) {
          if (((bitField0_ & 0x00000001) != 0)) {
            ask_ = java.util.Collections.unmodifiableList(ask_);
            bitField0_ = (bitField0_ & ~0x00000001);
          }
          result.ask_ = ask_;
        } else {
          result.ask_ = askBuilder_.build();
        }
        if (releaseBuilder_ == null) {
          if (((bitField0_ & 0x00000002) != 0)) {
            release_ = java.util.Collections.unmodifiableList(release_);
            bitField0_ = (bitField0_ & ~0x00000002);
          }
          result.release_ = release_;
        } else {
          result.release_ = releaseBuilder_.build();
        }
        if (updateRequestsBuilder_ == null) {
          if (((bitField0_ & 0x00000020) != 0)) {
            updateRequests_ = java.util.Collections.unmodifiableList(updateRequests_);
            bitField0_ = (bitField0_ & ~0x00000020);
          }
          result.updateRequests_ = updateRequests_;
        } else {
          result.updateRequests_ = updateRequestsBuilder_.build();
        }
        if (schedulingRequestsBuilder_ == null) {
          if (((bitField0_ & 0x00000040) != 0)) {
            schedulingRequests_ = java.util.Collections.unmodifiableList(schedulingRequests_);
            bitField0_ = (bitField0_ & ~0x00000040);
          }
          result.schedulingRequests_ = schedulingRequests_;
        } else {
          result.schedulingRequests_ = schedulingRequestsBuilder_.build();
        }
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto result) {
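        // Remap the builder's presence bits to the message's denser layout:
        // 0x04 -> 0x01 (blacklist_request), 0x08 -> 0x02 (response_id),
        // 0x10 -> 0x04 (progress), 0x80 -> 0x08 (tracking_url).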
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000004) != 0)) {
          result.blacklistRequest_ = blacklistRequestBuilder_ == null
              ? blacklistRequest_
              : blacklistRequestBuilder_.build();
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000008) != 0)) {
          result.responseId_ = responseId_;
          to_bitField0_ |= 0x00000002;
        }
        if (((from_bitField0_ & 0x00000010) != 0)) {
          result.progress_ = progress_;
          to_bitField0_ |= 0x00000004;
        }
        if (((from_bitField0_ & 0x00000080) != 0)) {
          result.trackingUrl_ = trackingUrl_;
          to_bitField0_ |= 0x00000008;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto.getDefaultInstance()) return this;
        if (askBuilder_ == null) {
          if (!other.ask_.isEmpty()) {
            if (ask_.isEmpty()) {
              ask_ = other.ask_;
              bitField0_ = (bitField0_ & ~0x00000001);
            } else {
              ensureAskIsMutable();
              ask_.addAll(other.ask_);
            }
            onChanged();
          }
        } else {
          if (!other.ask_.isEmpty()) {
            if (askBuilder_.isEmpty()) {
              askBuilder_.dispose();
              askBuilder_ = null;
              ask_ = other.ask_;
              bitField0_ = (bitField0_ & ~0x00000001);
              askBuilder_ = 
                org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                   getAskFieldBuilder() : null;
            } else {
              askBuilder_.addAllMessages(other.ask_);
            }
          }
        }
        if (releaseBuilder_ == null) {
          if (!other.release_.isEmpty()) {
            if (release_.isEmpty()) {
              release_ = other.release_;
              bitField0_ = (bitField0_ & ~0x00000002);
            } else {
              ensureReleaseIsMutable();
              release_.addAll(other.release_);
            }
            onChanged();
          }
        } else {
          if (!other.release_.isEmpty()) {
            if (releaseBuilder_.isEmpty()) {
              releaseBuilder_.dispose();
              releaseBuilder_ = null;
              release_ = other.release_;
              bitField0_ = (bitField0_ & ~0x00000002);
              releaseBuilder_ = 
                org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                   getReleaseFieldBuilder() : null;
            } else {
              releaseBuilder_.addAllMessages(other.release_);
            }
          }
        }
        if (other.hasBlacklistRequest()) {
          mergeBlacklistRequest(other.getBlacklistRequest());
        }
        if (other.hasResponseId()) {
          setResponseId(other.getResponseId());
        }
        if (other.hasProgress()) {
          setProgress(other.getProgress());
        }
        if (updateRequestsBuilder_ == null) {
          if (!other.updateRequests_.isEmpty()) {
            if (updateRequests_.isEmpty()) {
              updateRequests_ = other.updateRequests_;
              bitField0_ = (bitField0_ & ~0x00000020);
            } else {
              ensureUpdateRequestsIsMutable();
              updateRequests_.addAll(other.updateRequests_);
            }
            onChanged();
          }
        } else {
          if (!other.updateRequests_.isEmpty()) {
            if (updateRequestsBuilder_.isEmpty()) {
              updateRequestsBuilder_.dispose();
              updateRequestsBuilder_ = null;
              updateRequests_ = other.updateRequests_;
              bitField0_ = (bitField0_ & ~0x00000020);
              updateRequestsBuilder_ = 
                org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                   getUpdateRequestsFieldBuilder() : null;
            } else {
              updateRequestsBuilder_.addAllMessages(other.updateRequests_);
            }
          }
        }
        if (schedulingRequestsBuilder_ == null) {
          if (!other.schedulingRequests_.isEmpty()) {
            if (schedulingRequests_.isEmpty()) {
              schedulingRequests_ = other.schedulingRequests_;
              bitField0_ = (bitField0_ & ~0x00000040);
            } else {
              ensureSchedulingRequestsIsMutable();
              schedulingRequests_.addAll(other.schedulingRequests_);
            }
            onChanged();
          }
        } else {
          if (!other.schedulingRequests_.isEmpty()) {
            if (schedulingRequestsBuilder_.isEmpty()) {
              schedulingRequestsBuilder_.dispose();
              schedulingRequestsBuilder_ = null;
              schedulingRequests_ = other.schedulingRequests_;
              bitField0_ = (bitField0_ & ~0x00000040);
              schedulingRequestsBuilder_ = 
                org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                   getSchedulingRequestsFieldBuilder() : null;
            } else {
              schedulingRequestsBuilder_.addAllMessages(other.schedulingRequests_);
            }
          }
        }
        if (other.hasTrackingUrl()) {
          trackingUrl_ = other.trackingUrl_;
          bitField0_ |= 0x00000080;
          onChanged();
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        for (int i = 0; i < getAskCount(); i++) {
          if (!getAsk(i).isInitialized()) {
            return false;
          }
        }
        for (int i = 0; i < getUpdateRequestsCount(); i++) {
          if (!getUpdateRequests(i).isInitialized()) {
            return false;
          }
        }
        for (int i = 0; i < getSchedulingRequestsCount(); i++) {
          if (!getSchedulingRequests(i).isInitialized()) {
            return false;
          }
        }
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
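            // A tag encodes (field_number << 3) | wire_type: e.g. 10 = field 1,
            // length-delimited; 45 = field 5, 32-bit fixed; 90 = field 11,
            // length-delimited. Tag 0 marks the end of the input.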
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto m =
                    input.readMessage(
                        org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto.PARSER,
                        extensionRegistry);
                if (askBuilder_ == null) {
                  ensureAskIsMutable();
                  ask_.add(m);
                } else {
                  askBuilder_.addMessage(m);
                }
                break;
              } // case 10
              case 18: {
                org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto m =
                    input.readMessage(
                        org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.PARSER,
                        extensionRegistry);
                if (releaseBuilder_ == null) {
                  ensureReleaseIsMutable();
                  release_.add(m);
                } else {
                  releaseBuilder_.addMessage(m);
                }
                break;
              } // case 18
              case 26: {
                input.readMessage(
                    getBlacklistRequestFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000004;
                break;
              } // case 26
              case 32: {
                responseId_ = input.readInt32();
                bitField0_ |= 0x00000008;
                break;
              } // case 32
              case 45: {
                progress_ = input.readFloat();
                bitField0_ |= 0x00000010;
                break;
              } // case 45
              case 58: {
                org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto m =
                    input.readMessage(
                        org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto.PARSER,
                        extensionRegistry);
                if (updateRequestsBuilder_ == null) {
                  ensureUpdateRequestsIsMutable();
                  updateRequests_.add(m);
                } else {
                  updateRequestsBuilder_.addMessage(m);
                }
                break;
              } // case 58
              case 82: {
                org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto m =
                    input.readMessage(
                        org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto.PARSER,
                        extensionRegistry);
                if (schedulingRequestsBuilder_ == null) {
                  ensureSchedulingRequestsIsMutable();
                  schedulingRequests_.add(m);
                } else {
                  schedulingRequestsBuilder_.addMessage(m);
                }
                break;
              } // case 82
              case 90: {
                trackingUrl_ = input.readBytes();
                bitField0_ |= 0x00000080;
                break;
              } // case 90
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

      private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto> ask_ =
        java.util.Collections.emptyList();
      private void ensureAskIsMutable() {
        if (!((bitField0_ & 0x00000001) != 0)) {
          ask_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto>(ask_);
          bitField0_ |= 0x00000001;
        }
      }

      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProtoOrBuilder> askBuilder_;
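      // A repeated field lives either in the inline list (ask_) or, once
      // getAskFieldBuilder() has been called, in askBuilder_; every accessor
      // below checks which representation is currently active.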

      /**
       * <code>repeated .hadoop.yarn.ResourceRequestProto ask = 1;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto> getAskList() {
        if (askBuilder_ == null) {
          return java.util.Collections.unmodifiableList(ask_);
        } else {
          return askBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceRequestProto ask = 1;</code>
       */
      public int getAskCount() {
        if (askBuilder_ == null) {
          return ask_.size();
        } else {
          return askBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceRequestProto ask = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto getAsk(int index) {
        if (askBuilder_ == null) {
          return ask_.get(index);
        } else {
          return askBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceRequestProto ask = 1;</code>
       */
      public Builder setAsk(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto value) {
        if (askBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureAskIsMutable();
          ask_.set(index, value);
          onChanged();
        } else {
          askBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceRequestProto ask = 1;</code>
       */
      public Builder setAsk(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto.Builder builderForValue) {
        if (askBuilder_ == null) {
          ensureAskIsMutable();
          ask_.set(index, builderForValue.build());
          onChanged();
        } else {
          askBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceRequestProto ask = 1;</code>
       */
      public Builder addAsk(org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto value) {
        if (askBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureAskIsMutable();
          ask_.add(value);
          onChanged();
        } else {
          askBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceRequestProto ask = 1;</code>
       */
      public Builder addAsk(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto value) {
        if (askBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureAskIsMutable();
          ask_.add(index, value);
          onChanged();
        } else {
          askBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceRequestProto ask = 1;</code>
       */
      public Builder addAsk(
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto.Builder builderForValue) {
        if (askBuilder_ == null) {
          ensureAskIsMutable();
          ask_.add(builderForValue.build());
          onChanged();
        } else {
          askBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceRequestProto ask = 1;</code>
       */
      public Builder addAsk(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto.Builder builderForValue) {
        if (askBuilder_ == null) {
          ensureAskIsMutable();
          ask_.add(index, builderForValue.build());
          onChanged();
        } else {
          askBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceRequestProto ask = 1;</code>
       */
      public Builder addAllAsk(
          java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto> values) {
        if (askBuilder_ == null) {
          ensureAskIsMutable();
          org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
              values, ask_);
          onChanged();
        } else {
          askBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceRequestProto ask = 1;</code>
       */
      public Builder clearAsk() {
        if (askBuilder_ == null) {
          ask_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000001);
          onChanged();
        } else {
          askBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceRequestProto ask = 1;</code>
       */
      public Builder removeAsk(int index) {
        if (askBuilder_ == null) {
          ensureAskIsMutable();
          ask_.remove(index);
          onChanged();
        } else {
          askBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceRequestProto ask = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto.Builder getAskBuilder(
          int index) {
        return getAskFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceRequestProto ask = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProtoOrBuilder getAskOrBuilder(
          int index) {
        if (askBuilder_ == null) {
          return ask_.get(index);
        } else {
          return askBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceRequestProto ask = 1;</code>
       */
      public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProtoOrBuilder> 
           getAskOrBuilderList() {
        if (askBuilder_ != null) {
          return askBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(ask_);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceRequestProto ask = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto.Builder addAskBuilder() {
        return getAskFieldBuilder().addBuilder(
            org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceRequestProto ask = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto.Builder addAskBuilder(
          int index) {
        return getAskFieldBuilder().addBuilder(
            index, org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceRequestProto ask = 1;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto.Builder> 
           getAskBuilderList() {
        return getAskFieldBuilder().getBuilderList();
      }
      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProtoOrBuilder> 
          getAskFieldBuilder() {
        if (askBuilder_ == null) {
          askBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProtoOrBuilder>(
                  ask_,
                  ((bitField0_ & 0x00000001) != 0),
                  getParentForChildren(),
                  isClean());
          ask_ = null;
        }
        return askBuilder_;
      }
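      /*
       * Example (illustrative): populating the repeated "ask" field through
       * the builder. addAsk() accepts either a built message or a builder.
       *
       *   AllocateRequestProto.Builder b = AllocateRequestProto.newBuilder();
       *   b.addAsk(org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto
       *       .getDefaultInstance());
       */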

      private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto> release_ =
        java.util.Collections.emptyList();
      private void ensureReleaseIsMutable() {
        if (!((bitField0_ & 0x00000002) != 0)) {
          release_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto>(release_);
          bitField0_ |= 0x00000002;
        }
      }

      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> releaseBuilder_;

      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto release = 2;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto> getReleaseList() {
        if (releaseBuilder_ == null) {
          return java.util.Collections.unmodifiableList(release_);
        } else {
          return releaseBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto release = 2;</code>
       */
      public int getReleaseCount() {
        if (releaseBuilder_ == null) {
          return release_.size();
        } else {
          return releaseBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto release = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getRelease(int index) {
        if (releaseBuilder_ == null) {
          return release_.get(index);
        } else {
          return releaseBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto release = 2;</code>
       */
      public Builder setRelease(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) {
        if (releaseBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureReleaseIsMutable();
          release_.set(index, value);
          onChanged();
        } else {
          releaseBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto release = 2;</code>
       */
      public Builder setRelease(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder builderForValue) {
        if (releaseBuilder_ == null) {
          ensureReleaseIsMutable();
          release_.set(index, builderForValue.build());
          onChanged();
        } else {
          releaseBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto release = 2;</code>
       */
      public Builder addRelease(org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) {
        if (releaseBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureReleaseIsMutable();
          release_.add(value);
          onChanged();
        } else {
          releaseBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto release = 2;</code>
       */
      public Builder addRelease(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) {
        if (releaseBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureReleaseIsMutable();
          release_.add(index, value);
          onChanged();
        } else {
          releaseBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto release = 2;</code>
       */
      public Builder addRelease(
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder builderForValue) {
        if (releaseBuilder_ == null) {
          ensureReleaseIsMutable();
          release_.add(builderForValue.build());
          onChanged();
        } else {
          releaseBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto release = 2;</code>
       */
      public Builder addRelease(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder builderForValue) {
        if (releaseBuilder_ == null) {
          ensureReleaseIsMutable();
          release_.add(index, builderForValue.build());
          onChanged();
        } else {
          releaseBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto release = 2;</code>
       */
      public Builder addAllRelease(
          java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto> values) {
        if (releaseBuilder_ == null) {
          ensureReleaseIsMutable();
          org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
              values, release_);
          onChanged();
        } else {
          releaseBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto release = 2;</code>
       */
      public Builder clearRelease() {
        if (releaseBuilder_ == null) {
          release_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000002);
          onChanged();
        } else {
          releaseBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto release = 2;</code>
       */
      public Builder removeRelease(int index) {
        if (releaseBuilder_ == null) {
          ensureReleaseIsMutable();
          release_.remove(index);
          onChanged();
        } else {
          releaseBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto release = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder getReleaseBuilder(
          int index) {
        return getReleaseFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto release = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getReleaseOrBuilder(
          int index) {
        if (releaseBuilder_ == null) {
          return release_.get(index);
        } else {
          return releaseBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto release = 2;</code>
       */
      public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> 
           getReleaseOrBuilderList() {
        if (releaseBuilder_ != null) {
          return releaseBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(release_);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto release = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder addReleaseBuilder() {
        return getReleaseFieldBuilder().addBuilder(
            org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto release = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder addReleaseBuilder(
          int index) {
        return getReleaseFieldBuilder().addBuilder(
            index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto release = 2;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder> 
           getReleaseBuilderList() {
        return getReleaseFieldBuilder().getBuilderList();
      }
      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> 
          getReleaseFieldBuilder() {
        if (releaseBuilder_ == null) {
          releaseBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder>(
                  release_,
                  ((bitField0_ & 0x00000002) != 0),
                  getParentForChildren(),
                  isClean());
          release_ = null;
        }
        return releaseBuilder_;
      }

      private org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto blacklistRequest_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProtoOrBuilder> blacklistRequestBuilder_;
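      // The single-field builder is created lazily by
      // getBlacklistRequestFieldBuilder(); until then the message is stored
      // directly in blacklistRequest_.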
      /**
       * <code>optional .hadoop.yarn.ResourceBlacklistRequestProto blacklist_request = 3;</code>
       * @return Whether the blacklistRequest field is set.
       */
      public boolean hasBlacklistRequest() {
        return ((bitField0_ & 0x00000004) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.ResourceBlacklistRequestProto blacklist_request = 3;</code>
       * @return The blacklistRequest.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto getBlacklistRequest() {
        if (blacklistRequestBuilder_ == null) {
          return blacklistRequest_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto.getDefaultInstance() : blacklistRequest_;
        } else {
          return blacklistRequestBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.ResourceBlacklistRequestProto blacklist_request = 3;</code>
       */
      public Builder setBlacklistRequest(org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto value) {
        if (blacklistRequestBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          blacklistRequest_ = value;
        } else {
          blacklistRequestBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000004;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceBlacklistRequestProto blacklist_request = 3;</code>
       */
      public Builder setBlacklistRequest(
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto.Builder builderForValue) {
        if (blacklistRequestBuilder_ == null) {
          blacklistRequest_ = builderForValue.build();
        } else {
          blacklistRequestBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000004;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceBlacklistRequestProto blacklist_request = 3;</code>
       */
      public Builder mergeBlacklistRequest(org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto value) {
        if (blacklistRequestBuilder_ == null) {
          if (((bitField0_ & 0x00000004) != 0) &&
            blacklistRequest_ != null &&
            blacklistRequest_ != org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto.getDefaultInstance()) {
            getBlacklistRequestBuilder().mergeFrom(value);
          } else {
            blacklistRequest_ = value;
          }
        } else {
          blacklistRequestBuilder_.mergeFrom(value);
        }
        if (blacklistRequest_ != null) {
          bitField0_ |= 0x00000004;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceBlacklistRequestProto blacklist_request = 3;</code>
       */
      public Builder clearBlacklistRequest() {
        bitField0_ = (bitField0_ & ~0x00000004);
        blacklistRequest_ = null;
        if (blacklistRequestBuilder_ != null) {
          blacklistRequestBuilder_.dispose();
          blacklistRequestBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceBlacklistRequestProto blacklist_request = 3;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto.Builder getBlacklistRequestBuilder() {
        bitField0_ |= 0x00000004;
        onChanged();
        return getBlacklistRequestFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.ResourceBlacklistRequestProto blacklist_request = 3;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProtoOrBuilder getBlacklistRequestOrBuilder() {
        if (blacklistRequestBuilder_ != null) {
          return blacklistRequestBuilder_.getMessageOrBuilder();
        } else {
          return blacklistRequest_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto.getDefaultInstance() : blacklistRequest_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.ResourceBlacklistRequestProto blacklist_request = 3;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProtoOrBuilder> 
          getBlacklistRequestFieldBuilder() {
        if (blacklistRequestBuilder_ == null) {
          blacklistRequestBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProtoOrBuilder>(
                  getBlacklistRequest(),
                  getParentForChildren(),
                  isClean());
          blacklistRequest_ = null;
        }
        return blacklistRequestBuilder_;
      }
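      // getBlacklistRequestFieldBuilder() above lazily replaces the plain
      // blacklistRequest_ reference with a SingleFieldBuilderV3 the first time
      // a nested builder is requested; afterwards the field builder owns the
      // field's state and blacklistRequest_ stays null.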

      private int responseId_ ;
      /**
       * <code>optional int32 response_id = 4;</code>
       * @return Whether the responseId field is set.
       */
      @java.lang.Override
      public boolean hasResponseId() {
        return ((bitField0_ & 0x00000008) != 0);
      }
      /**
       * <code>optional int32 response_id = 4;</code>
       * @return The responseId.
       */
      @java.lang.Override
      public int getResponseId() {
        return responseId_;
      }
      /**
       * <code>optional int32 response_id = 4;</code>
       * @param value The responseId to set.
       * @return This builder for chaining.
       */
      public Builder setResponseId(int value) {
        responseId_ = value;
        bitField0_ |= 0x00000008;
        onChanged();
        return this;
      }
      /**
       * <code>optional int32 response_id = 4;</code>
       * @return This builder for chaining.
       */
      public Builder clearResponseId() {
        bitField0_ = (bitField0_ & ~0x00000008);
        responseId_ = 0;
        onChanged();
        return this;
      }
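      // Note: in the AM-RM allocate protocol, response_id behaves as a
      // sequence number. The ApplicationMaster echoes the last responseId it
      // received so the ResourceManager can detect duplicate or out-of-order
      // allocate calls.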

      private float progress_ ;
      /**
       * <code>optional float progress = 5;</code>
       * @return Whether the progress field is set.
       */
      @java.lang.Override
      public boolean hasProgress() {
        return ((bitField0_ & 0x00000010) != 0);
      }
      /**
       * <code>optional float progress = 5;</code>
       * @return The progress.
       */
      @java.lang.Override
      public float getProgress() {
        return progress_;
      }
      /**
       * <code>optional float progress = 5;</code>
       * @param value The progress to set.
       * @return This builder for chaining.
       */
      public Builder setProgress(float value) {
        progress_ = value;
        bitField0_ |= 0x00000010;
        onChanged();
        return this;
      }
      /**
       * <code>optional float progress = 5;</code>
       * @return This builder for chaining.
       */
      public Builder clearProgress() {
        bitField0_ = (bitField0_ & ~0x00000010);
        progress_ = 0F;
        onChanged();
        return this;
      }

      private java.util.List<org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto> updateRequests_ =
        java.util.Collections.emptyList();
      // Lazily copies the shared list into a fresh ArrayList the first time
      // the builder mutates the repeated update_requests field, recording the
      // mutable state in bitField0_.
      private void ensureUpdateRequestsIsMutable() {
        if (!((bitField0_ & 0x00000020) != 0)) {
          updateRequests_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto>(updateRequests_);
          bitField0_ |= 0x00000020;
        }
      }

      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto, org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto.Builder, org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProtoOrBuilder> updateRequestsBuilder_;

      /**
       * <code>repeated .hadoop.yarn.UpdateContainerRequestProto update_requests = 7;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto> getUpdateRequestsList() {
        if (updateRequestsBuilder_ == null) {
          return java.util.Collections.unmodifiableList(updateRequests_);
        } else {
          return updateRequestsBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.UpdateContainerRequestProto update_requests = 7;</code>
       */
      public int getUpdateRequestsCount() {
        if (updateRequestsBuilder_ == null) {
          return updateRequests_.size();
        } else {
          return updateRequestsBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.UpdateContainerRequestProto update_requests = 7;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto getUpdateRequests(int index) {
        if (updateRequestsBuilder_ == null) {
          return updateRequests_.get(index);
        } else {
          return updateRequestsBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.UpdateContainerRequestProto update_requests = 7;</code>
       */
      public Builder setUpdateRequests(
          int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto value) {
        if (updateRequestsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureUpdateRequestsIsMutable();
          updateRequests_.set(index, value);
          onChanged();
        } else {
          updateRequestsBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.UpdateContainerRequestProto update_requests = 7;</code>
       */
      public Builder setUpdateRequests(
          int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto.Builder builderForValue) {
        if (updateRequestsBuilder_ == null) {
          ensureUpdateRequestsIsMutable();
          updateRequests_.set(index, builderForValue.build());
          onChanged();
        } else {
          updateRequestsBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.UpdateContainerRequestProto update_requests = 7;</code>
       */
      public Builder addUpdateRequests(org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto value) {
        if (updateRequestsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureUpdateRequestsIsMutable();
          updateRequests_.add(value);
          onChanged();
        } else {
          updateRequestsBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.UpdateContainerRequestProto update_requests = 7;</code>
       */
      public Builder addUpdateRequests(
          int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto value) {
        if (updateRequestsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureUpdateRequestsIsMutable();
          updateRequests_.add(index, value);
          onChanged();
        } else {
          updateRequestsBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.UpdateContainerRequestProto update_requests = 7;</code>
       */
      public Builder addUpdateRequests(
          org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto.Builder builderForValue) {
        if (updateRequestsBuilder_ == null) {
          ensureUpdateRequestsIsMutable();
          updateRequests_.add(builderForValue.build());
          onChanged();
        } else {
          updateRequestsBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.UpdateContainerRequestProto update_requests = 7;</code>
       */
      public Builder addUpdateRequests(
          int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto.Builder builderForValue) {
        if (updateRequestsBuilder_ == null) {
          ensureUpdateRequestsIsMutable();
          updateRequests_.add(index, builderForValue.build());
          onChanged();
        } else {
          updateRequestsBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.UpdateContainerRequestProto update_requests = 7;</code>
       */
      public Builder addAllUpdateRequests(
          java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto> values) {
        if (updateRequestsBuilder_ == null) {
          ensureUpdateRequestsIsMutable();
          org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
              values, updateRequests_);
          onChanged();
        } else {
          updateRequestsBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.UpdateContainerRequestProto update_requests = 7;</code>
       */
      public Builder clearUpdateRequests() {
        if (updateRequestsBuilder_ == null) {
          updateRequests_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000020);
          onChanged();
        } else {
          updateRequestsBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.UpdateContainerRequestProto update_requests = 7;</code>
       */
      public Builder removeUpdateRequests(int index) {
        if (updateRequestsBuilder_ == null) {
          ensureUpdateRequestsIsMutable();
          updateRequests_.remove(index);
          onChanged();
        } else {
          updateRequestsBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.UpdateContainerRequestProto update_requests = 7;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto.Builder getUpdateRequestsBuilder(
          int index) {
        return getUpdateRequestsFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .hadoop.yarn.UpdateContainerRequestProto update_requests = 7;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProtoOrBuilder getUpdateRequestsOrBuilder(
          int index) {
        if (updateRequestsBuilder_ == null) {
          return updateRequests_.get(index);
        } else {
          return updateRequestsBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.UpdateContainerRequestProto update_requests = 7;</code>
       */
      public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProtoOrBuilder> 
           getUpdateRequestsOrBuilderList() {
        if (updateRequestsBuilder_ != null) {
          return updateRequestsBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(updateRequests_);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.UpdateContainerRequestProto update_requests = 7;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto.Builder addUpdateRequestsBuilder() {
        return getUpdateRequestsFieldBuilder().addBuilder(
            org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.UpdateContainerRequestProto update_requests = 7;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto.Builder addUpdateRequestsBuilder(
          int index) {
        return getUpdateRequestsFieldBuilder().addBuilder(
            index, org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.UpdateContainerRequestProto update_requests = 7;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto.Builder> 
           getUpdateRequestsBuilderList() {
        return getUpdateRequestsFieldBuilder().getBuilderList();
      }
      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto, org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto.Builder, org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProtoOrBuilder> 
          getUpdateRequestsFieldBuilder() {
        if (updateRequestsBuilder_ == null) {
          updateRequestsBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto, org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto.Builder, org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProtoOrBuilder>(
                  updateRequests_,
                  ((bitField0_ & 0x00000020) != 0),
                  getParentForChildren(),
                  isClean());
          updateRequests_ = null;
        }
        return updateRequestsBuilder_;
      }
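      /*
       * A minimal usage sketch for the repeated update_requests field. The
       * containerId value is a placeholder, and the setters assume the
       * UpdateContainerRequestProto message generated from
       * yarn_service_protos.proto:
       *
       *   AllocateRequestProto.Builder b = AllocateRequestProto.newBuilder();
       *   b.addUpdateRequestsBuilder()
       *       .setContainerId(containerId)
       *       .setUpdateType(ContainerUpdateTypeProto.INCREASE_RESOURCE);
       *
       * addUpdateRequestsBuilder() goes through the RepeatedFieldBuilderV3
       * above, so edits to the returned builder are visible to the parent.
       */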

      private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto> schedulingRequests_ =
        java.util.Collections.emptyList();
      // Lazily copies the shared list into a fresh ArrayList the first time
      // the builder mutates the repeated scheduling_requests field, recording
      // the mutable state in bitField0_.
      private void ensureSchedulingRequestsIsMutable() {
        if (!((bitField0_ & 0x00000040) != 0)) {
          schedulingRequests_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto>(schedulingRequests_);
          bitField0_ |= 0x00000040;
        }
      }

      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto, org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProtoOrBuilder> schedulingRequestsBuilder_;

      /**
       * <code>repeated .hadoop.yarn.SchedulingRequestProto scheduling_requests = 10;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto> getSchedulingRequestsList() {
        if (schedulingRequestsBuilder_ == null) {
          return java.util.Collections.unmodifiableList(schedulingRequests_);
        } else {
          return schedulingRequestsBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.SchedulingRequestProto scheduling_requests = 10;</code>
       */
      public int getSchedulingRequestsCount() {
        if (schedulingRequestsBuilder_ == null) {
          return schedulingRequests_.size();
        } else {
          return schedulingRequestsBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.SchedulingRequestProto scheduling_requests = 10;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto getSchedulingRequests(int index) {
        if (schedulingRequestsBuilder_ == null) {
          return schedulingRequests_.get(index);
        } else {
          return schedulingRequestsBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.SchedulingRequestProto scheduling_requests = 10;</code>
       */
      public Builder setSchedulingRequests(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto value) {
        if (schedulingRequestsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureSchedulingRequestsIsMutable();
          schedulingRequests_.set(index, value);
          onChanged();
        } else {
          schedulingRequestsBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.SchedulingRequestProto scheduling_requests = 10;</code>
       */
      public Builder setSchedulingRequests(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto.Builder builderForValue) {
        if (schedulingRequestsBuilder_ == null) {
          ensureSchedulingRequestsIsMutable();
          schedulingRequests_.set(index, builderForValue.build());
          onChanged();
        } else {
          schedulingRequestsBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.SchedulingRequestProto scheduling_requests = 10;</code>
       */
      public Builder addSchedulingRequests(org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto value) {
        if (schedulingRequestsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureSchedulingRequestsIsMutable();
          schedulingRequests_.add(value);
          onChanged();
        } else {
          schedulingRequestsBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.SchedulingRequestProto scheduling_requests = 10;</code>
       */
      public Builder addSchedulingRequests(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto value) {
        if (schedulingRequestsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureSchedulingRequestsIsMutable();
          schedulingRequests_.add(index, value);
          onChanged();
        } else {
          schedulingRequestsBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.SchedulingRequestProto scheduling_requests = 10;</code>
       */
      public Builder addSchedulingRequests(
          org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto.Builder builderForValue) {
        if (schedulingRequestsBuilder_ == null) {
          ensureSchedulingRequestsIsMutable();
          schedulingRequests_.add(builderForValue.build());
          onChanged();
        } else {
          schedulingRequestsBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.SchedulingRequestProto scheduling_requests = 10;</code>
       */
      public Builder addSchedulingRequests(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto.Builder builderForValue) {
        if (schedulingRequestsBuilder_ == null) {
          ensureSchedulingRequestsIsMutable();
          schedulingRequests_.add(index, builderForValue.build());
          onChanged();
        } else {
          schedulingRequestsBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.SchedulingRequestProto scheduling_requests = 10;</code>
       */
      public Builder addAllSchedulingRequests(
          java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto> values) {
        if (schedulingRequestsBuilder_ == null) {
          ensureSchedulingRequestsIsMutable();
          org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
              values, schedulingRequests_);
          onChanged();
        } else {
          schedulingRequestsBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.SchedulingRequestProto scheduling_requests = 10;</code>
       */
      public Builder clearSchedulingRequests() {
        if (schedulingRequestsBuilder_ == null) {
          schedulingRequests_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000040);
          onChanged();
        } else {
          schedulingRequestsBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.SchedulingRequestProto scheduling_requests = 10;</code>
       */
      public Builder removeSchedulingRequests(int index) {
        if (schedulingRequestsBuilder_ == null) {
          ensureSchedulingRequestsIsMutable();
          schedulingRequests_.remove(index);
          onChanged();
        } else {
          schedulingRequestsBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.SchedulingRequestProto scheduling_requests = 10;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto.Builder getSchedulingRequestsBuilder(
          int index) {
        return getSchedulingRequestsFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .hadoop.yarn.SchedulingRequestProto scheduling_requests = 10;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProtoOrBuilder getSchedulingRequestsOrBuilder(
          int index) {
        if (schedulingRequestsBuilder_ == null) {
          return schedulingRequests_.get(index);
        } else {
          return schedulingRequestsBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.SchedulingRequestProto scheduling_requests = 10;</code>
       */
      public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProtoOrBuilder> 
           getSchedulingRequestsOrBuilderList() {
        if (schedulingRequestsBuilder_ != null) {
          return schedulingRequestsBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(schedulingRequests_);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.SchedulingRequestProto scheduling_requests = 10;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto.Builder addSchedulingRequestsBuilder() {
        return getSchedulingRequestsFieldBuilder().addBuilder(
            org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.SchedulingRequestProto scheduling_requests = 10;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto.Builder addSchedulingRequestsBuilder(
          int index) {
        return getSchedulingRequestsFieldBuilder().addBuilder(
            index, org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.SchedulingRequestProto scheduling_requests = 10;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto.Builder> 
           getSchedulingRequestsBuilderList() {
        return getSchedulingRequestsFieldBuilder().getBuilderList();
      }
      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto, org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProtoOrBuilder> 
          getSchedulingRequestsFieldBuilder() {
        if (schedulingRequestsBuilder_ == null) {
          schedulingRequestsBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto, org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProtoOrBuilder>(
                  schedulingRequests_,
                  ((bitField0_ & 0x00000040) != 0),
                  getParentForChildren(),
                  isClean());
          schedulingRequests_ = null;
        }
        return schedulingRequestsBuilder_;
      }

      private java.lang.Object trackingUrl_ = "";
      /**
       * <code>optional string tracking_url = 11;</code>
       * @return Whether the trackingUrl field is set.
       */
      public boolean hasTrackingUrl() {
        return ((bitField0_ & 0x00000080) != 0);
      }
      /**
       * <code>optional string tracking_url = 11;</code>
       * @return The trackingUrl.
       */
      public java.lang.String getTrackingUrl() {
        java.lang.Object ref = trackingUrl_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            trackingUrl_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string tracking_url = 11;</code>
       * @return The bytes for trackingUrl.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getTrackingUrlBytes() {
        java.lang.Object ref = trackingUrl_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          trackingUrl_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string tracking_url = 11;</code>
       * @param value The trackingUrl to set.
       * @return This builder for chaining.
       */
      public Builder setTrackingUrl(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        trackingUrl_ = value;
        bitField0_ |= 0x00000080;
        onChanged();
        return this;
      }
      /**
       * <code>optional string tracking_url = 11;</code>
       * @return This builder for chaining.
       */
      public Builder clearTrackingUrl() {
        trackingUrl_ = getDefaultInstance().getTrackingUrl();
        bitField0_ = (bitField0_ & ~0x00000080);
        onChanged();
        return this;
      }
      /**
       * <code>optional string tracking_url = 11;</code>
       * @param value The bytes for trackingUrl to set.
       * @return This builder for chaining.
       */
      public Builder setTrackingUrlBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        trackingUrl_ = value;
        bitField0_ |= 0x00000080;
        onChanged();
        return this;
      }
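      // trackingUrl_ is stored as either a String or a ByteString; the
      // accessors above lazily convert between the two and cache the result,
      // only caching the decoded String when it is valid UTF-8.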
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.AllocateRequestProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.AllocateRequestProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<AllocateRequestProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<AllocateRequestProto>() {
      @java.lang.Override
      public AllocateRequestProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
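    // parsePartialFrom returns buildPartial() rather than build() so that a
    // partially read message can be attached to the thrown exception via
    // setUnfinishedMessage() without tripping the required-field check.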

    public static org.apache.hadoop.thirdparty.protobuf.Parser<AllocateRequestProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<AllocateRequestProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
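  /*
   * A minimal, hypothetical sketch of building an AllocateRequestProto on the
   * ApplicationMaster side; lastResponseId and containerIdProto are
   * placeholders:
   *
   *   YarnServiceProtos.AllocateRequestProto request =
   *       YarnServiceProtos.AllocateRequestProto.newBuilder()
   *           .setResponseId(lastResponseId)  // echo the RM's last response id
   *           .setProgress(0.5f)              // reported fraction of work done
   *           .addRelease(containerIdProto)   // containers handed back to the RM
   *           .build();
   *
   * build() throws if a required sub-message is uninitialized, which is why
   * the parser above uses buildPartial() instead.
   */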

  public interface NMTokenProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.NMTokenProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>optional .hadoop.yarn.NodeIdProto nodeId = 1;</code>
     * @return Whether the nodeId field is set.
     */
    boolean hasNodeId();
    /**
     * <code>optional .hadoop.yarn.NodeIdProto nodeId = 1;</code>
     * @return The nodeId.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto getNodeId();
    /**
     * <code>optional .hadoop.yarn.NodeIdProto nodeId = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder getNodeIdOrBuilder();

    /**
     * <code>optional .hadoop.common.TokenProto token = 2;</code>
     * @return Whether the token field is set.
     */
    boolean hasToken();
    /**
     * <code>optional .hadoop.common.TokenProto token = 2;</code>
     * @return The token.
     */
    org.apache.hadoop.security.proto.SecurityProtos.TokenProto getToken();
    /**
     * <code>optional .hadoop.common.TokenProto token = 2;</code>
     */
    org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder getTokenOrBuilder();
  }
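  // An NMTokenProto pairs a NodeIdProto with the hadoop.common.TokenProto an
  // ApplicationMaster presents when talking to that NodeManager.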
  /**
   * Protobuf type {@code hadoop.yarn.NMTokenProto}
   */
  public static final class NMTokenProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.NMTokenProto)
      NMTokenProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use NMTokenProto.newBuilder() to construct.
    private NMTokenProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private NMTokenProto() {
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new NMTokenProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_NMTokenProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_NMTokenProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto.Builder.class);
    }

    private int bitField0_;
    public static final int NODEID_FIELD_NUMBER = 1;
    private org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto nodeId_;
    /**
     * <code>optional .hadoop.yarn.NodeIdProto nodeId = 1;</code>
     * @return Whether the nodeId field is set.
     */
    @java.lang.Override
    public boolean hasNodeId() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.NodeIdProto nodeId = 1;</code>
     * @return The nodeId.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto getNodeId() {
      return nodeId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance() : nodeId_;
    }
    /**
     * <code>optional .hadoop.yarn.NodeIdProto nodeId = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder getNodeIdOrBuilder() {
      return nodeId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance() : nodeId_;
    }

    public static final int TOKEN_FIELD_NUMBER = 2;
    private org.apache.hadoop.security.proto.SecurityProtos.TokenProto token_;
    /**
     * <code>optional .hadoop.common.TokenProto token = 2;</code>
     * @return Whether the token field is set.
     */
    @java.lang.Override
    public boolean hasToken() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <code>optional .hadoop.common.TokenProto token = 2;</code>
     * @return The token.
     */
    @java.lang.Override
    public org.apache.hadoop.security.proto.SecurityProtos.TokenProto getToken() {
      return token_ == null ? org.apache.hadoop.security.proto.SecurityProtos.TokenProto.getDefaultInstance() : token_;
    }
    /**
     * <code>optional .hadoop.common.TokenProto token = 2;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder getTokenOrBuilder() {
      return token_ == null ? org.apache.hadoop.security.proto.SecurityProtos.TokenProto.getDefaultInstance() : token_;
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      if (hasToken()) {
        if (!getToken().isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }
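    // Only token is validated here: the generated check implies that
    // hadoop.common.TokenProto declares required fields, while NodeIdProto
    // has none that need initialization.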

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeMessage(1, getNodeId());
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        output.writeMessage(2, getToken());
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, getNodeId());
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(2, getToken());
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto) obj;

      if (hasNodeId() != other.hasNodeId()) return false;
      if (hasNodeId()) {
        if (!getNodeId()
            .equals(other.getNodeId())) return false;
      }
      if (hasToken() != other.hasToken()) return false;
      if (hasToken()) {
        if (!getToken()
            .equals(other.getToken())) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasNodeId()) {
        hash = (37 * hash) + NODEID_FIELD_NUMBER;
        hash = (53 * hash) + getNodeId().hashCode();
      }
      if (hasToken()) {
        hash = (37 * hash) + TOKEN_FIELD_NUMBER;
        hash = (53 * hash) + getToken().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }
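    /*
     * A round-trip sketch; nmToken is a placeholder for an existing instance:
     *
     *   byte[] bytes = nmToken.toByteArray();
     *   YarnServiceProtos.NMTokenProto copy =
     *       YarnServiceProtos.NMTokenProto.parseFrom(bytes);
     *   assert copy.equals(nmToken);
     */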

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.NMTokenProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.NMTokenProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_NMTokenProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_NMTokenProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // Pre-creates the nested field builders when the protobuf runtime is
      // configured to always use field builders, so later mutations go
      // through them from the start.
      private void maybeForceBuilderInitialization() {
        if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
          getNodeIdFieldBuilder();
          getTokenFieldBuilder();
        }
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        nodeId_ = null;
        if (nodeIdBuilder_ != null) {
          nodeIdBuilder_.dispose();
          nodeIdBuilder_ = null;
        }
        token_ = null;
        if (tokenBuilder_ != null) {
          tokenBuilder_.dispose();
          tokenBuilder_ = null;
        }
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_NMTokenProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      // Copies each field that is set on this builder into the freshly
      // constructed message and accumulates the corresponding has-bits into
      // the message's bitField0_.
      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.nodeId_ = nodeIdBuilder_ == null
              ? nodeId_
              : nodeIdBuilder_.build();
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.token_ = tokenBuilder_ == null
              ? token_
              : tokenBuilder_.build();
          to_bitField0_ |= 0x00000002;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto.getDefaultInstance()) return this;
        if (other.hasNodeId()) {
          mergeNodeId(other.getNodeId());
        }
        if (other.hasToken()) {
          mergeToken(other.getToken());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        if (hasToken()) {
          if (!getToken().isInitialized()) {
            return false;
          }
        }
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                input.readMessage(
                    getNodeIdFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
              case 18: {
                input.readMessage(
                    getTokenFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
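      // The case labels above are protobuf wire tags: tag = (field_number << 3)
      // | wire_type, so nodeId (field 1, length-delimited type 2) arrives as
      // tag 10 and token (field 2) as tag 18.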
      private int bitField0_;

      private org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto nodeId_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder> nodeIdBuilder_;
      /**
       * <code>optional .hadoop.yarn.NodeIdProto nodeId = 1;</code>
       * @return Whether the nodeId field is set.
       */
      public boolean hasNodeId() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.NodeIdProto nodeId = 1;</code>
       * @return The nodeId.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto getNodeId() {
        if (nodeIdBuilder_ == null) {
          return nodeId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance() : nodeId_;
        } else {
          return nodeIdBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.NodeIdProto nodeId = 1;</code>
       */
      public Builder setNodeId(org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto value) {
        if (nodeIdBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          nodeId_ = value;
        } else {
          nodeIdBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.NodeIdProto nodeId = 1;</code>
       */
      public Builder setNodeId(
          org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder builderForValue) {
        if (nodeIdBuilder_ == null) {
          nodeId_ = builderForValue.build();
        } else {
          nodeIdBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.NodeIdProto nodeId = 1;</code>
       */
      public Builder mergeNodeId(org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto value) {
        if (nodeIdBuilder_ == null) {
          if (((bitField0_ & 0x00000001) != 0) &&
            nodeId_ != null &&
            nodeId_ != org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance()) {
            getNodeIdBuilder().mergeFrom(value);
          } else {
            nodeId_ = value;
          }
        } else {
          nodeIdBuilder_.mergeFrom(value);
        }
        if (nodeId_ != null) {
          bitField0_ |= 0x00000001;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.NodeIdProto nodeId = 1;</code>
       */
      public Builder clearNodeId() {
        bitField0_ = (bitField0_ & ~0x00000001);
        nodeId_ = null;
        if (nodeIdBuilder_ != null) {
          nodeIdBuilder_.dispose();
          nodeIdBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.NodeIdProto nodeId = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder getNodeIdBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getNodeIdFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.NodeIdProto nodeId = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder getNodeIdOrBuilder() {
        if (nodeIdBuilder_ != null) {
          return nodeIdBuilder_.getMessageOrBuilder();
        } else {
          return nodeId_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance() : nodeId_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.NodeIdProto nodeId = 1;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder> 
          getNodeIdFieldBuilder() {
        if (nodeIdBuilder_ == null) {
          nodeIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder>(
                  getNodeId(),
                  getParentForChildren(),
                  isClean());
          nodeId_ = null;
        }
        return nodeIdBuilder_;
      }

      private org.apache.hadoop.security.proto.SecurityProtos.TokenProto token_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.security.proto.SecurityProtos.TokenProto, org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder, org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder> tokenBuilder_;
      /**
       * <code>optional .hadoop.common.TokenProto token = 2;</code>
       * @return Whether the token field is set.
       */
      public boolean hasToken() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * <code>optional .hadoop.common.TokenProto token = 2;</code>
       * @return The token.
       */
      public org.apache.hadoop.security.proto.SecurityProtos.TokenProto getToken() {
        if (tokenBuilder_ == null) {
          return token_ == null ? org.apache.hadoop.security.proto.SecurityProtos.TokenProto.getDefaultInstance() : token_;
        } else {
          return tokenBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.common.TokenProto token = 2;</code>
       */
      public Builder setToken(org.apache.hadoop.security.proto.SecurityProtos.TokenProto value) {
        if (tokenBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          token_ = value;
        } else {
          tokenBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.common.TokenProto token = 2;</code>
       */
      public Builder setToken(
          org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder builderForValue) {
        if (tokenBuilder_ == null) {
          token_ = builderForValue.build();
        } else {
          tokenBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.common.TokenProto token = 2;</code>
       */
      public Builder mergeToken(org.apache.hadoop.security.proto.SecurityProtos.TokenProto value) {
        if (tokenBuilder_ == null) {
          if (((bitField0_ & 0x00000002) != 0) &&
            token_ != null &&
            token_ != org.apache.hadoop.security.proto.SecurityProtos.TokenProto.getDefaultInstance()) {
            getTokenBuilder().mergeFrom(value);
          } else {
            token_ = value;
          }
        } else {
          tokenBuilder_.mergeFrom(value);
        }
        if (token_ != null) {
          bitField0_ |= 0x00000002;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.common.TokenProto token = 2;</code>
       */
      public Builder clearToken() {
        bitField0_ = (bitField0_ & ~0x00000002);
        token_ = null;
        if (tokenBuilder_ != null) {
          tokenBuilder_.dispose();
          tokenBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.common.TokenProto token = 2;</code>
       */
      public org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder getTokenBuilder() {
        bitField0_ |= 0x00000002;
        onChanged();
        return getTokenFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.common.TokenProto token = 2;</code>
       */
      public org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder getTokenOrBuilder() {
        if (tokenBuilder_ != null) {
          return tokenBuilder_.getMessageOrBuilder();
        } else {
          return token_ == null ?
              org.apache.hadoop.security.proto.SecurityProtos.TokenProto.getDefaultInstance() : token_;
        }
      }
      /**
       * <code>optional .hadoop.common.TokenProto token = 2;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.security.proto.SecurityProtos.TokenProto, org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder, org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder> 
          getTokenFieldBuilder() {
        if (tokenBuilder_ == null) {
          tokenBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.security.proto.SecurityProtos.TokenProto, org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder, org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder>(
                  getToken(),
                  getParentForChildren(),
                  isClean());
          token_ = null;
        }
        return tokenBuilder_;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.NMTokenProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.NMTokenProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<NMTokenProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<NMTokenProto>() {
      @java.lang.Override
      public NMTokenProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<NMTokenProto> parser() {
      return PARSER;
    }
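
    // Usage sketch (illustrative, hand-written; not protoc output): the static
    // PARSER field above is deprecated, so callers go through parser() instead.
    // `in` stands in for a java.io.InputStream carrying a serialized message;
    // parseFrom declares InvalidProtocolBufferException.
    //
    //   NMTokenProto msg = NMTokenProto.parser().parseFrom(in);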

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<NMTokenProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
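
  // Usage sketch (illustrative, hand-written; not protoc output): building an
  // NMTokenProto via its Builder and round-tripping the bytes. Both fields are
  // optional, so build() succeeds with any subset of them set; the host and
  // port values here are hypothetical.
  //
  //   org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto nodeId =
  //       org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.newBuilder()
  //           .setHost("nm-host.example.com")
  //           .setPort(45454)
  //           .build();
  //   NMTokenProto nmToken = NMTokenProto.newBuilder()
  //       .setNodeId(nodeId)
  //       .build();
  //   byte[] wire = nmToken.toByteArray();
  //   NMTokenProto parsed = NMTokenProto.parseFrom(wire);  // throws InvalidProtocolBufferException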

  public interface UpdatedContainerProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.UpdatedContainerProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>required .hadoop.yarn.ContainerUpdateTypeProto update_type = 1;</code>
     * @return Whether the updateType field is set.
     */
    boolean hasUpdateType();
    /**
     * <code>required .hadoop.yarn.ContainerUpdateTypeProto update_type = 1;</code>
     * @return The updateType.
     */
    org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateTypeProto getUpdateType();

    /**
     * <code>required .hadoop.yarn.ContainerProto container = 2;</code>
     * @return Whether the container field is set.
     */
    boolean hasContainer();
    /**
     * <code>required .hadoop.yarn.ContainerProto container = 2;</code>
     * @return The container.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto getContainer();
    /**
     * <code>required .hadoop.yarn.ContainerProto container = 2;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerProtoOrBuilder getContainerOrBuilder();
  }
  /**
   * Protobuf type {@code hadoop.yarn.UpdatedContainerProto}
   */
  public static final class UpdatedContainerProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.UpdatedContainerProto)
      UpdatedContainerProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use UpdatedContainerProto.newBuilder() to construct.
    private UpdatedContainerProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private UpdatedContainerProto() {
      updateType_ = 0;
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new UpdatedContainerProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_UpdatedContainerProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_UpdatedContainerProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto.Builder.class);
    }

    private int bitField0_;
    public static final int UPDATE_TYPE_FIELD_NUMBER = 1;
    private int updateType_ = 0;
    /**
     * <code>required .hadoop.yarn.ContainerUpdateTypeProto update_type = 1;</code>
     * @return Whether the updateType field is set.
     */
    @java.lang.Override public boolean hasUpdateType() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>required .hadoop.yarn.ContainerUpdateTypeProto update_type = 1;</code>
     * @return The updateType.
     */
    @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateTypeProto getUpdateType() {
      org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateTypeProto result = org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateTypeProto.forNumber(updateType_);
      return result == null ? org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateTypeProto.INCREASE_RESOURCE : result;
    }
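
    // Note (illustrative, hand-written): forNumber(...) returns null for wire
    // values outside the declared enum range, so this getter falls back to
    // INCREASE_RESOURCE. Unrecognized numbers never reach updateType_, though:
    // Builder.mergeFrom (case 8 of its tag switch) routes them into the
    // unknown field set instead.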

    public static final int CONTAINER_FIELD_NUMBER = 2;
    private org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto container_;
    /**
     * <code>required .hadoop.yarn.ContainerProto container = 2;</code>
     * @return Whether the container field is set.
     */
    @java.lang.Override
    public boolean hasContainer() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <code>required .hadoop.yarn.ContainerProto container = 2;</code>
     * @return The container.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto getContainer() {
      return container_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.getDefaultInstance() : container_;
    }
    /**
     * <code>required .hadoop.yarn.ContainerProto container = 2;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ContainerProtoOrBuilder getContainerOrBuilder() {
      return container_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.getDefaultInstance() : container_;
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      if (!hasUpdateType()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasContainer()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!getContainer().isInitialized()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeEnum(1, updateType_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        output.writeMessage(2, getContainer());
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeEnumSize(1, updateType_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(2, getContainer());
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto) obj;

      if (hasUpdateType() != other.hasUpdateType()) return false;
      if (hasUpdateType()) {
        if (updateType_ != other.updateType_) return false;
      }
      if (hasContainer() != other.hasContainer()) return false;
      if (hasContainer()) {
        if (!getContainer()
            .equals(other.getContainer())) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasUpdateType()) {
        hash = (37 * hash) + UPDATE_TYPE_FIELD_NUMBER;
        hash = (53 * hash) + updateType_;
      }
      if (hasContainer()) {
        hash = (37 * hash) + CONTAINER_FIELD_NUMBER;
        hash = (53 * hash) + getContainer().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.UpdatedContainerProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.UpdatedContainerProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_UpdatedContainerProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_UpdatedContainerProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
          getContainerFieldBuilder();
        }
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        updateType_ = 0;
        container_ = null;
        if (containerBuilder_ != null) {
          containerBuilder_.dispose();
          containerBuilder_ = null;
        }
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_UpdatedContainerProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.updateType_ = updateType_;
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.container_ = containerBuilder_ == null
              ? container_
              : containerBuilder_.build();
          to_bitField0_ |= 0x00000002;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto.getDefaultInstance()) return this;
        if (other.hasUpdateType()) {
          setUpdateType(other.getUpdateType());
        }
        if (other.hasContainer()) {
          mergeContainer(other.getContainer());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        if (!hasUpdateType()) {
          return false;
        }
        if (!hasContainer()) {
          return false;
        }
        if (!getContainer().isInitialized()) {
          return false;
        }
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 8: {
                int tmpRaw = input.readEnum();
                org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateTypeProto tmpValue =
                    org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateTypeProto.forNumber(tmpRaw);
                if (tmpValue == null) {
                  mergeUnknownVarintField(1, tmpRaw);
                } else {
                  updateType_ = tmpRaw;
                  bitField0_ |= 0x00000001;
                }
                break;
              } // case 8
              case 18: {
                input.readMessage(
                    getContainerFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

      private int updateType_ = 0;
      /**
       * <code>required .hadoop.yarn.ContainerUpdateTypeProto update_type = 1;</code>
       * @return Whether the updateType field is set.
       */
      @java.lang.Override public boolean hasUpdateType() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>required .hadoop.yarn.ContainerUpdateTypeProto update_type = 1;</code>
       * @return The updateType.
       */
      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateTypeProto getUpdateType() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateTypeProto result = org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateTypeProto.forNumber(updateType_);
        return result == null ? org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateTypeProto.INCREASE_RESOURCE : result;
      }
      /**
       * <code>required .hadoop.yarn.ContainerUpdateTypeProto update_type = 1;</code>
       * @param value The updateType to set.
       * @return This builder for chaining.
       */
      public Builder setUpdateType(org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateTypeProto value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000001;
        updateType_ = value.getNumber();
        onChanged();
        return this;
      }
      /**
       * <code>required .hadoop.yarn.ContainerUpdateTypeProto update_type = 1;</code>
       * @return This builder for chaining.
       */
      public Builder clearUpdateType() {
        bitField0_ = (bitField0_ & ~0x00000001);
        updateType_ = 0;
        onChanged();
        return this;
      }

      private org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto container_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProtoOrBuilder> containerBuilder_;
      /**
       * <code>required .hadoop.yarn.ContainerProto container = 2;</code>
       * @return Whether the container field is set.
       */
      public boolean hasContainer() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * <code>required .hadoop.yarn.ContainerProto container = 2;</code>
       * @return The container.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto getContainer() {
        if (containerBuilder_ == null) {
          return container_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.getDefaultInstance() : container_;
        } else {
          return containerBuilder_.getMessage();
        }
      }
      /**
       * <code>required .hadoop.yarn.ContainerProto container = 2;</code>
       */
      public Builder setContainer(org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto value) {
        if (containerBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          container_ = value;
        } else {
          containerBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>required .hadoop.yarn.ContainerProto container = 2;</code>
       */
      public Builder setContainer(
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder builderForValue) {
        if (containerBuilder_ == null) {
          container_ = builderForValue.build();
        } else {
          containerBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>required .hadoop.yarn.ContainerProto container = 2;</code>
       */
      public Builder mergeContainer(org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto value) {
        if (containerBuilder_ == null) {
          if (((bitField0_ & 0x00000002) != 0) &&
            container_ != null &&
            container_ != org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.getDefaultInstance()) {
            getContainerBuilder().mergeFrom(value);
          } else {
            container_ = value;
          }
        } else {
          containerBuilder_.mergeFrom(value);
        }
        if (container_ != null) {
          bitField0_ |= 0x00000002;
          onChanged();
        }
        return this;
      }
      /**
       * <code>required .hadoop.yarn.ContainerProto container = 2;</code>
       */
      public Builder clearContainer() {
        bitField0_ = (bitField0_ & ~0x00000002);
        container_ = null;
        if (containerBuilder_ != null) {
          containerBuilder_.dispose();
          containerBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>required .hadoop.yarn.ContainerProto container = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder getContainerBuilder() {
        bitField0_ |= 0x00000002;
        onChanged();
        return getContainerFieldBuilder().getBuilder();
      }
      /**
       * <code>required .hadoop.yarn.ContainerProto container = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerProtoOrBuilder getContainerOrBuilder() {
        if (containerBuilder_ != null) {
          return containerBuilder_.getMessageOrBuilder();
        } else {
          return container_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.getDefaultInstance() : container_;
        }
      }
      /**
       * <code>required .hadoop.yarn.ContainerProto container = 2;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProtoOrBuilder> 
          getContainerFieldBuilder() {
        if (containerBuilder_ == null) {
          containerBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProtoOrBuilder>(
                  getContainer(),
                  getParentForChildren(),
                  isClean());
          container_ = null;
        }
        return containerBuilder_;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.UpdatedContainerProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.UpdatedContainerProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<UpdatedContainerProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<UpdatedContainerProto>() {
      @java.lang.Override
      public UpdatedContainerProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<UpdatedContainerProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<UpdatedContainerProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
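
  // Usage sketch (illustrative, hand-written; not protoc output): both fields
  // of UpdatedContainerProto are required, so build() throws an
  // UninitializedMessageException until each is set (and the nested container
  // is itself initialized). `containerProto` stands in for an existing,
  // fully-initialized org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.
  //
  //   UpdatedContainerProto.Builder b = UpdatedContainerProto.newBuilder()
  //       .setUpdateType(ContainerUpdateTypeProto.PROMOTE_EXECUTION_TYPE);
  //   boolean ready = b.isInitialized();   // false: container is still unset
  //   b.setContainer(containerProto);
  //   UpdatedContainerProto updated = b.build();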

  public interface EnhancedHeadroomProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.EnhancedHeadroomProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>optional int32 total_pending_count = 1;</code>
     * @return Whether the totalPendingCount field is set.
     */
    boolean hasTotalPendingCount();
    /**
     * <code>optional int32 total_pending_count = 1;</code>
     * @return The totalPendingCount.
     */
    int getTotalPendingCount();

    /**
     * <code>optional int32 total_active_cores = 2;</code>
     * @return Whether the totalActiveCores field is set.
     */
    boolean hasTotalActiveCores();
    /**
     * <code>optional int32 total_active_cores = 2;</code>
     * @return The totalActiveCores.
     */
    int getTotalActiveCores();
  }
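
  // Usage sketch (illustrative, hand-written; not protoc output): the optional
  // int32 fields carry explicit presence, so an unset total_pending_count
  // still reads as 0 while hasTotalPendingCount() reports it was never set.
  //
  //   EnhancedHeadroomProto headroom = EnhancedHeadroomProto.newBuilder()
  //       .setTotalActiveCores(128)       // hypothetical core count
  //       .build();
  //   headroom.hasTotalPendingCount();    // false
  //   headroom.getTotalPendingCount();    // 0 (field default)
  //   headroom.hasTotalActiveCores();     // true
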
  /**
   * Protobuf type {@code hadoop.yarn.EnhancedHeadroomProto}
   */
  public static final class EnhancedHeadroomProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.EnhancedHeadroomProto)
      EnhancedHeadroomProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use EnhancedHeadroomProto.newBuilder() to construct.
    private EnhancedHeadroomProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private EnhancedHeadroomProto() {
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new EnhancedHeadroomProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_EnhancedHeadroomProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_EnhancedHeadroomProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.EnhancedHeadroomProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.EnhancedHeadroomProto.Builder.class);
    }

    private int bitField0_;
    public static final int TOTAL_PENDING_COUNT_FIELD_NUMBER = 1;
    private int totalPendingCount_ = 0;
    /**
     * <code>optional int32 total_pending_count = 1;</code>
     * @return Whether the totalPendingCount field is set.
     */
    @java.lang.Override
    public boolean hasTotalPendingCount() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional int32 total_pending_count = 1;</code>
     * @return The totalPendingCount.
     */
    @java.lang.Override
    public int getTotalPendingCount() {
      return totalPendingCount_;
    }

    public static final int TOTAL_ACTIVE_CORES_FIELD_NUMBER = 2;
    private int totalActiveCores_ = 0;
    /**
     * <code>optional int32 total_active_cores = 2;</code>
     * @return Whether the totalActiveCores field is set.
     */
    @java.lang.Override
    public boolean hasTotalActiveCores() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <code>optional int32 total_active_cores = 2;</code>
     * @return The totalActiveCores.
     */
    @java.lang.Override
    public int getTotalActiveCores() {
      return totalActiveCores_;
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeInt32(1, totalPendingCount_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        output.writeInt32(2, totalActiveCores_);
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt32Size(1, totalPendingCount_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt32Size(2, totalActiveCores_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.EnhancedHeadroomProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.EnhancedHeadroomProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.EnhancedHeadroomProto) obj;

      if (hasTotalPendingCount() != other.hasTotalPendingCount()) return false;
      if (hasTotalPendingCount()) {
        if (getTotalPendingCount()
            != other.getTotalPendingCount()) return false;
      }
      if (hasTotalActiveCores() != other.hasTotalActiveCores()) return false;
      if (hasTotalActiveCores()) {
        if (getTotalActiveCores()
            != other.getTotalActiveCores()) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasTotalPendingCount()) {
        hash = (37 * hash) + TOTAL_PENDING_COUNT_FIELD_NUMBER;
        hash = (53 * hash) + getTotalPendingCount();
      }
      if (hasTotalActiveCores()) {
        hash = (37 * hash) + TOTAL_ACTIVE_CORES_FIELD_NUMBER;
        hash = (53 * hash) + getTotalActiveCores();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.EnhancedHeadroomProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.EnhancedHeadroomProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.EnhancedHeadroomProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.EnhancedHeadroomProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.EnhancedHeadroomProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.EnhancedHeadroomProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.EnhancedHeadroomProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.EnhancedHeadroomProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.EnhancedHeadroomProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.EnhancedHeadroomProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.EnhancedHeadroomProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.EnhancedHeadroomProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.EnhancedHeadroomProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.EnhancedHeadroomProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.EnhancedHeadroomProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.EnhancedHeadroomProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_EnhancedHeadroomProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_EnhancedHeadroomProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.EnhancedHeadroomProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.EnhancedHeadroomProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.EnhancedHeadroomProto.newBuilder()
      private Builder() {
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        totalPendingCount_ = 0;
        totalActiveCores_ = 0;
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_EnhancedHeadroomProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.EnhancedHeadroomProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.EnhancedHeadroomProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.EnhancedHeadroomProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.EnhancedHeadroomProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.EnhancedHeadroomProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.EnhancedHeadroomProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.EnhancedHeadroomProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServiceProtos.EnhancedHeadroomProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.totalPendingCount_ = totalPendingCount_;
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.totalActiveCores_ = totalActiveCores_;
          to_bitField0_ |= 0x00000002;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.EnhancedHeadroomProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.EnhancedHeadroomProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.EnhancedHeadroomProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.EnhancedHeadroomProto.getDefaultInstance()) return this;
        if (other.hasTotalPendingCount()) {
          setTotalPendingCount(other.getTotalPendingCount());
        }
        if (other.hasTotalActiveCores()) {
          setTotalActiveCores(other.getTotalActiveCores());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 8: {
                totalPendingCount_ = input.readInt32();
                bitField0_ |= 0x00000001;
                break;
              } // case 8
              case 16: {
                totalActiveCores_ = input.readInt32();
                bitField0_ |= 0x00000002;
                break;
              } // case 16
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
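      // Note on the tags in the parse loop above: a protobuf tag is
      // (field_number << 3) | wire_type, so 8 == (1 << 3) | 0 for
      // total_pending_count (varint) and 16 == (2 << 3) | 0 for
      // total_active_cores (varint); tag 0 signals end of input.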
      private int bitField0_;
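      // bitField0_ (declared above) tracks explicit field presence, per proto2
      // `optional` semantics: bit 0x1 = total_pending_count,
      // bit 0x2 = total_active_cores, matching the has*/clear* accessors below.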

      private int totalPendingCount_;
      /**
       * <code>optional int32 total_pending_count = 1;</code>
       * @return Whether the totalPendingCount field is set.
       */
      @java.lang.Override
      public boolean hasTotalPendingCount() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>optional int32 total_pending_count = 1;</code>
       * @return The totalPendingCount.
       */
      @java.lang.Override
      public int getTotalPendingCount() {
        return totalPendingCount_;
      }
      /**
       * <code>optional int32 total_pending_count = 1;</code>
       * @param value The totalPendingCount to set.
       * @return This builder for chaining.
       */
      public Builder setTotalPendingCount(int value) {
        totalPendingCount_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional int32 total_pending_count = 1;</code>
       * @return This builder for chaining.
       */
      public Builder clearTotalPendingCount() {
        bitField0_ = (bitField0_ & ~0x00000001);
        totalPendingCount_ = 0;
        onChanged();
        return this;
      }

      private int totalActiveCores_;
      /**
       * <code>optional int32 total_active_cores = 2;</code>
       * @return Whether the totalActiveCores field is set.
       */
      @java.lang.Override
      public boolean hasTotalActiveCores() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * <code>optional int32 total_active_cores = 2;</code>
       * @return The totalActiveCores.
       */
      @java.lang.Override
      public int getTotalActiveCores() {
        return totalActiveCores_;
      }
      /**
       * <code>optional int32 total_active_cores = 2;</code>
       * @param value The totalActiveCores to set.
       * @return This builder for chaining.
       */
      public Builder setTotalActiveCores(int value) {
        totalActiveCores_ = value;
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>optional int32 total_active_cores = 2;</code>
       * @return This builder for chaining.
       */
      public Builder clearTotalActiveCores() {
        bitField0_ = (bitField0_ & ~0x00000002);
        totalActiveCores_ = 0;
        onChanged();
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.EnhancedHeadroomProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.EnhancedHeadroomProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.EnhancedHeadroomProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.EnhancedHeadroomProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.EnhancedHeadroomProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<EnhancedHeadroomProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<EnhancedHeadroomProto>() {
      @java.lang.Override
      public EnhancedHeadroomProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<EnhancedHeadroomProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<EnhancedHeadroomProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.EnhancedHeadroomProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
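  // Illustrative sketch (hand-added, not protoc output): round-tripping an
  // EnhancedHeadroomProto through its builder and parser. Values are hypothetical.
  //
  //   YarnServiceProtos.EnhancedHeadroomProto headroom =
  //       YarnServiceProtos.EnhancedHeadroomProto.newBuilder()
  //           .setTotalPendingCount(42)   // hypothetical pending count
  //           .setTotalActiveCores(1024)  // hypothetical active vcore total
  //           .build();
  //   byte[] wire = headroom.toByteArray();
  //   YarnServiceProtos.EnhancedHeadroomProto parsed =
  //       YarnServiceProtos.EnhancedHeadroomProto.parseFrom(wire);
  //   assert parsed.getTotalActiveCores() == 1024;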

  public interface AllocateResponseProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.AllocateResponseProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>optional .hadoop.yarn.AMCommandProto a_m_command = 1;</code>
     * @return Whether the aMCommand field is set.
     */
    boolean hasAMCommand();
    /**
     * <code>optional .hadoop.yarn.AMCommandProto a_m_command = 1;</code>
     * @return The aMCommand.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.AMCommandProto getAMCommand();

    /**
     * <code>optional int32 response_id = 2;</code>
     * @return Whether the responseId field is set.
     */
    boolean hasResponseId();
    /**
     * <code>optional int32 response_id = 2;</code>
     * @return The responseId.
     */
    int getResponseId();

    /**
     * <code>repeated .hadoop.yarn.ContainerProto allocated_containers = 3;</code>
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto> 
        getAllocatedContainersList();
    /**
     * <code>repeated .hadoop.yarn.ContainerProto allocated_containers = 3;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto getAllocatedContainers(int index);
    /**
     * <code>repeated .hadoop.yarn.ContainerProto allocated_containers = 3;</code>
     */
    int getAllocatedContainersCount();
    /**
     * <code>repeated .hadoop.yarn.ContainerProto allocated_containers = 3;</code>
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ContainerProtoOrBuilder> 
        getAllocatedContainersOrBuilderList();
    /**
     * <code>repeated .hadoop.yarn.ContainerProto allocated_containers = 3;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerProtoOrBuilder getAllocatedContainersOrBuilder(
        int index);

    /**
     * <code>repeated .hadoop.yarn.ContainerStatusProto completed_container_statuses = 4;</code>
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto> 
        getCompletedContainerStatusesList();
    /**
     * <code>repeated .hadoop.yarn.ContainerStatusProto completed_container_statuses = 4;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto getCompletedContainerStatuses(int index);
    /**
     * <code>repeated .hadoop.yarn.ContainerStatusProto completed_container_statuses = 4;</code>
     */
    int getCompletedContainerStatusesCount();
    /**
     * <code>repeated .hadoop.yarn.ContainerStatusProto completed_container_statuses = 4;</code>
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProtoOrBuilder> 
        getCompletedContainerStatusesOrBuilderList();
    /**
     * <code>repeated .hadoop.yarn.ContainerStatusProto completed_container_statuses = 4;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProtoOrBuilder getCompletedContainerStatusesOrBuilder(
        int index);

    /**
     * <code>optional .hadoop.yarn.ResourceProto limit = 5;</code>
     * @return Whether the limit field is set.
     */
    boolean hasLimit();
    /**
     * <code>optional .hadoop.yarn.ResourceProto limit = 5;</code>
     * @return The limit.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getLimit();
    /**
     * <code>optional .hadoop.yarn.ResourceProto limit = 5;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getLimitOrBuilder();

    /**
     * <code>repeated .hadoop.yarn.NodeReportProto updated_nodes = 6;</code>
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto> 
        getUpdatedNodesList();
    /**
     * <code>repeated .hadoop.yarn.NodeReportProto updated_nodes = 6;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto getUpdatedNodes(int index);
    /**
     * <code>repeated .hadoop.yarn.NodeReportProto updated_nodes = 6;</code>
     */
    int getUpdatedNodesCount();
    /**
     * <code>repeated .hadoop.yarn.NodeReportProto updated_nodes = 6;</code>
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProtoOrBuilder> 
        getUpdatedNodesOrBuilderList();
    /**
     * <code>repeated .hadoop.yarn.NodeReportProto updated_nodes = 6;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProtoOrBuilder getUpdatedNodesOrBuilder(
        int index);

    /**
     * <code>optional int32 num_cluster_nodes = 7;</code>
     * @return Whether the numClusterNodes field is set.
     */
    boolean hasNumClusterNodes();
    /**
     * <code>optional int32 num_cluster_nodes = 7;</code>
     * @return The numClusterNodes.
     */
    int getNumClusterNodes();

    /**
     * <code>optional .hadoop.yarn.PreemptionMessageProto preempt = 8;</code>
     * @return Whether the preempt field is set.
     */
    boolean hasPreempt();
    /**
     * <code>optional .hadoop.yarn.PreemptionMessageProto preempt = 8;</code>
     * @return The preempt.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto getPreempt();
    /**
     * <code>optional .hadoop.yarn.PreemptionMessageProto preempt = 8;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProtoOrBuilder getPreemptOrBuilder();

    /**
     * <code>repeated .hadoop.yarn.NMTokenProto nm_tokens = 9;</code>
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto> 
        getNmTokensList();
    /**
     * <code>repeated .hadoop.yarn.NMTokenProto nm_tokens = 9;</code>
     */
    org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto getNmTokens(int index);
    /**
     * <code>repeated .hadoop.yarn.NMTokenProto nm_tokens = 9;</code>
     */
    int getNmTokensCount();
    /**
     * <code>repeated .hadoop.yarn.NMTokenProto nm_tokens = 9;</code>
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProtoOrBuilder> 
        getNmTokensOrBuilderList();
    /**
     * <code>repeated .hadoop.yarn.NMTokenProto nm_tokens = 9;</code>
     */
    org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProtoOrBuilder getNmTokensOrBuilder(
        int index);

    /**
     * <code>optional .hadoop.common.TokenProto am_rm_token = 12;</code>
     * @return Whether the amRmToken field is set.
     */
    boolean hasAmRmToken();
    /**
     * <code>optional .hadoop.common.TokenProto am_rm_token = 12;</code>
     * @return The amRmToken.
     */
    org.apache.hadoop.security.proto.SecurityProtos.TokenProto getAmRmToken();
    /**
     * <code>optional .hadoop.common.TokenProto am_rm_token = 12;</code>
     */
    org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder getAmRmTokenOrBuilder();

    /**
     * <code>optional .hadoop.yarn.PriorityProto application_priority = 13;</code>
     * @return Whether the applicationPriority field is set.
     */
    boolean hasApplicationPriority();
    /**
     * <code>optional .hadoop.yarn.PriorityProto application_priority = 13;</code>
     * @return The applicationPriority.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto getApplicationPriority();
    /**
     * <code>optional .hadoop.yarn.PriorityProto application_priority = 13;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder getApplicationPriorityOrBuilder();

    /**
     * <code>optional .hadoop.yarn.CollectorInfoProto collector_info = 14;</code>
     * @return Whether the collectorInfo field is set.
     */
    boolean hasCollectorInfo();
    /**
     * <code>optional .hadoop.yarn.CollectorInfoProto collector_info = 14;</code>
     * @return The collectorInfo.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto getCollectorInfo();
    /**
     * <code>optional .hadoop.yarn.CollectorInfoProto collector_info = 14;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProtoOrBuilder getCollectorInfoOrBuilder();

    /**
     * <code>repeated .hadoop.yarn.UpdateContainerErrorProto update_errors = 15;</code>
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto> 
        getUpdateErrorsList();
    /**
     * <code>repeated .hadoop.yarn.UpdateContainerErrorProto update_errors = 15;</code>
     */
    org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto getUpdateErrors(int index);
    /**
     * <code>repeated .hadoop.yarn.UpdateContainerErrorProto update_errors = 15;</code>
     */
    int getUpdateErrorsCount();
    /**
     * <code>repeated .hadoop.yarn.UpdateContainerErrorProto update_errors = 15;</code>
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProtoOrBuilder> 
        getUpdateErrorsOrBuilderList();
    /**
     * <code>repeated .hadoop.yarn.UpdateContainerErrorProto update_errors = 15;</code>
     */
    org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProtoOrBuilder getUpdateErrorsOrBuilder(
        int index);

    /**
     * <code>repeated .hadoop.yarn.UpdatedContainerProto updated_containers = 16;</code>
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto> 
        getUpdatedContainersList();
    /**
     * <code>repeated .hadoop.yarn.UpdatedContainerProto updated_containers = 16;</code>
     */
    org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto getUpdatedContainers(int index);
    /**
     * <code>repeated .hadoop.yarn.UpdatedContainerProto updated_containers = 16;</code>
     */
    int getUpdatedContainersCount();
    /**
     * <code>repeated .hadoop.yarn.UpdatedContainerProto updated_containers = 16;</code>
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProtoOrBuilder> 
        getUpdatedContainersOrBuilderList();
    /**
     * <code>repeated .hadoop.yarn.UpdatedContainerProto updated_containers = 16;</code>
     */
    org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProtoOrBuilder getUpdatedContainersOrBuilder(
        int index);

    /**
     * <code>repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 17;</code>
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto> 
        getContainersFromPreviousAttemptsList();
    /**
     * <code>repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 17;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto getContainersFromPreviousAttempts(int index);
    /**
     * <code>repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 17;</code>
     */
    int getContainersFromPreviousAttemptsCount();
    /**
     * <code>repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 17;</code>
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ContainerProtoOrBuilder> 
        getContainersFromPreviousAttemptsOrBuilderList();
    /**
     * <code>repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 17;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerProtoOrBuilder getContainersFromPreviousAttemptsOrBuilder(
        int index);

    /**
     * <code>repeated .hadoop.yarn.RejectedSchedulingRequestProto rejected_scheduling_requests = 18;</code>
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto> 
        getRejectedSchedulingRequestsList();
    /**
     * <code>repeated .hadoop.yarn.RejectedSchedulingRequestProto rejected_scheduling_requests = 18;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto getRejectedSchedulingRequests(int index);
    /**
     * <code>repeated .hadoop.yarn.RejectedSchedulingRequestProto rejected_scheduling_requests = 18;</code>
     */
    int getRejectedSchedulingRequestsCount();
    /**
     * <code>repeated .hadoop.yarn.RejectedSchedulingRequestProto rejected_scheduling_requests = 18;</code>
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProtoOrBuilder> 
        getRejectedSchedulingRequestsOrBuilderList();
    /**
     * <code>repeated .hadoop.yarn.RejectedSchedulingRequestProto rejected_scheduling_requests = 18;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProtoOrBuilder getRejectedSchedulingRequestsOrBuilder(
        int index);

    /**
     * <code>optional .hadoop.yarn.EnhancedHeadroomProto enhanced_headroom = 19;</code>
     * @return Whether the enhancedHeadroom field is set.
     */
    boolean hasEnhancedHeadroom();
    /**
     * <code>optional .hadoop.yarn.EnhancedHeadroomProto enhanced_headroom = 19;</code>
     * @return The enhancedHeadroom.
     */
    org.apache.hadoop.yarn.proto.YarnServiceProtos.EnhancedHeadroomProto getEnhancedHeadroom();
    /**
     * <code>optional .hadoop.yarn.EnhancedHeadroomProto enhanced_headroom = 19;</code>
     */
    org.apache.hadoop.yarn.proto.YarnServiceProtos.EnhancedHeadroomProtoOrBuilder getEnhancedHeadroomOrBuilder();
  }
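  // Illustrative sketch (hand-added, not protoc output): reading a repeated field
  // through the OrBuilder view, which accepts both built messages and builders.
  //
  //   static void inspectAllocated(
  //       YarnServiceProtos.AllocateResponseProtoOrBuilder response) {
  //     for (int i = 0; i < response.getAllocatedContainersCount(); i++) {
  //       org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto c =
  //           response.getAllocatedContainers(i);
  //       // ... inspect c ...
  //     }
  //   }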
  /**
   * Protobuf type {@code hadoop.yarn.AllocateResponseProto}
   */
  public static final class AllocateResponseProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.AllocateResponseProto)
      AllocateResponseProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use AllocateResponseProto.newBuilder() to construct.
    private AllocateResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private AllocateResponseProto() {
      aMCommand_ = 1;
      allocatedContainers_ = java.util.Collections.emptyList();
      completedContainerStatuses_ = java.util.Collections.emptyList();
      updatedNodes_ = java.util.Collections.emptyList();
      nmTokens_ = java.util.Collections.emptyList();
      updateErrors_ = java.util.Collections.emptyList();
      updatedContainers_ = java.util.Collections.emptyList();
      containersFromPreviousAttempts_ = java.util.Collections.emptyList();
      rejectedSchedulingRequests_ = java.util.Collections.emptyList();
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new AllocateResponseProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_AllocateResponseProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_AllocateResponseProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto.Builder.class);
    }

    private int bitField0_;
    public static final int A_M_COMMAND_FIELD_NUMBER = 1;
    private int aMCommand_ = 1;
    /**
     * <code>optional .hadoop.yarn.AMCommandProto a_m_command = 1;</code>
     * @return Whether the aMCommand field is set.
     */
    @java.lang.Override public boolean hasAMCommand() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.AMCommandProto a_m_command = 1;</code>
     * @return The aMCommand.
     */
    @java.lang.Override public org.apache.hadoop.yarn.proto.YarnProtos.AMCommandProto getAMCommand() {
      org.apache.hadoop.yarn.proto.YarnProtos.AMCommandProto result = org.apache.hadoop.yarn.proto.YarnProtos.AMCommandProto.forNumber(aMCommand_);
      return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.AMCommandProto.AM_RESYNC : result;
    }

    public static final int RESPONSE_ID_FIELD_NUMBER = 2;
    private int responseId_ = 0;
    /**
     * <code>optional int32 response_id = 2;</code>
     * @return Whether the responseId field is set.
     */
    @java.lang.Override
    public boolean hasResponseId() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <code>optional int32 response_id = 2;</code>
     * @return The responseId.
     */
    @java.lang.Override
    public int getResponseId() {
      return responseId_;
    }

    public static final int ALLOCATED_CONTAINERS_FIELD_NUMBER = 3;
    @SuppressWarnings("serial")
    private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto> allocatedContainers_;
    /**
     * <code>repeated .hadoop.yarn.ContainerProto allocated_containers = 3;</code>
     */
    @java.lang.Override
    public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto> getAllocatedContainersList() {
      return allocatedContainers_;
    }
    /**
     * <code>repeated .hadoop.yarn.ContainerProto allocated_containers = 3;</code>
     */
    @java.lang.Override
    public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ContainerProtoOrBuilder> 
        getAllocatedContainersOrBuilderList() {
      return allocatedContainers_;
    }
    /**
     * <code>repeated .hadoop.yarn.ContainerProto allocated_containers = 3;</code>
     */
    @java.lang.Override
    public int getAllocatedContainersCount() {
      return allocatedContainers_.size();
    }
    /**
     * <code>repeated .hadoop.yarn.ContainerProto allocated_containers = 3;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto getAllocatedContainers(int index) {
      return allocatedContainers_.get(index);
    }
    /**
     * <code>repeated .hadoop.yarn.ContainerProto allocated_containers = 3;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ContainerProtoOrBuilder getAllocatedContainersOrBuilder(
        int index) {
      return allocatedContainers_.get(index);
    }

    public static final int COMPLETED_CONTAINER_STATUSES_FIELD_NUMBER = 4;
    @SuppressWarnings("serial")
    private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto> completedContainerStatuses_;
    /**
     * <code>repeated .hadoop.yarn.ContainerStatusProto completed_container_statuses = 4;</code>
     */
    @java.lang.Override
    public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto> getCompletedContainerStatusesList() {
      return completedContainerStatuses_;
    }
    /**
     * <code>repeated .hadoop.yarn.ContainerStatusProto completed_container_statuses = 4;</code>
     */
    @java.lang.Override
    public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProtoOrBuilder> 
        getCompletedContainerStatusesOrBuilderList() {
      return completedContainerStatuses_;
    }
    /**
     * <code>repeated .hadoop.yarn.ContainerStatusProto completed_container_statuses = 4;</code>
     */
    @java.lang.Override
    public int getCompletedContainerStatusesCount() {
      return completedContainerStatuses_.size();
    }
    /**
     * <code>repeated .hadoop.yarn.ContainerStatusProto completed_container_statuses = 4;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto getCompletedContainerStatuses(int index) {
      return completedContainerStatuses_.get(index);
    }
    /**
     * <code>repeated .hadoop.yarn.ContainerStatusProto completed_container_statuses = 4;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProtoOrBuilder getCompletedContainerStatusesOrBuilder(
        int index) {
      return completedContainerStatuses_.get(index);
    }

    public static final int LIMIT_FIELD_NUMBER = 5;
    private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto limit_;
    /**
     * <code>optional .hadoop.yarn.ResourceProto limit = 5;</code>
     * @return Whether the limit field is set.
     */
    @java.lang.Override
    public boolean hasLimit() {
      return ((bitField0_ & 0x00000004) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.ResourceProto limit = 5;</code>
     * @return The limit.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getLimit() {
      return limit_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : limit_;
    }
    /**
     * <code>optional .hadoop.yarn.ResourceProto limit = 5;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getLimitOrBuilder() {
      return limit_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : limit_;
    }

    public static final int UPDATED_NODES_FIELD_NUMBER = 6;
    @SuppressWarnings("serial")
    private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto> updatedNodes_;
    /**
     * <code>repeated .hadoop.yarn.NodeReportProto updated_nodes = 6;</code>
     */
    @java.lang.Override
    public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto> getUpdatedNodesList() {
      return updatedNodes_;
    }
    /**
     * <code>repeated .hadoop.yarn.NodeReportProto updated_nodes = 6;</code>
     */
    @java.lang.Override
    public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProtoOrBuilder> 
        getUpdatedNodesOrBuilderList() {
      return updatedNodes_;
    }
    /**
     * <code>repeated .hadoop.yarn.NodeReportProto updated_nodes = 6;</code>
     */
    @java.lang.Override
    public int getUpdatedNodesCount() {
      return updatedNodes_.size();
    }
    /**
     * <code>repeated .hadoop.yarn.NodeReportProto updated_nodes = 6;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto getUpdatedNodes(int index) {
      return updatedNodes_.get(index);
    }
    /**
     * <code>repeated .hadoop.yarn.NodeReportProto updated_nodes = 6;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProtoOrBuilder getUpdatedNodesOrBuilder(
        int index) {
      return updatedNodes_.get(index);
    }

    public static final int NUM_CLUSTER_NODES_FIELD_NUMBER = 7;
    private int numClusterNodes_ = 0;
    /**
     * <code>optional int32 num_cluster_nodes = 7;</code>
     * @return Whether the numClusterNodes field is set.
     */
    @java.lang.Override
    public boolean hasNumClusterNodes() {
      return ((bitField0_ & 0x00000008) != 0);
    }
    /**
     * <code>optional int32 num_cluster_nodes = 7;</code>
     * @return The numClusterNodes.
     */
    @java.lang.Override
    public int getNumClusterNodes() {
      return numClusterNodes_;
    }

    public static final int PREEMPT_FIELD_NUMBER = 8;
    private org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto preempt_;
    /**
     * <code>optional .hadoop.yarn.PreemptionMessageProto preempt = 8;</code>
     * @return Whether the preempt field is set.
     */
    @java.lang.Override
    public boolean hasPreempt() {
      return ((bitField0_ & 0x00000010) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.PreemptionMessageProto preempt = 8;</code>
     * @return The preempt.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto getPreempt() {
      return preempt_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto.getDefaultInstance() : preempt_;
    }
    /**
     * <code>optional .hadoop.yarn.PreemptionMessageProto preempt = 8;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProtoOrBuilder getPreemptOrBuilder() {
      return preempt_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto.getDefaultInstance() : preempt_;
    }

    public static final int NM_TOKENS_FIELD_NUMBER = 9;
    @SuppressWarnings("serial")
    private java.util.List<org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto> nmTokens_;
    /**
     * <code>repeated .hadoop.yarn.NMTokenProto nm_tokens = 9;</code>
     */
    @java.lang.Override
    public java.util.List<org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto> getNmTokensList() {
      return nmTokens_;
    }
    /**
     * <code>repeated .hadoop.yarn.NMTokenProto nm_tokens = 9;</code>
     */
    @java.lang.Override
    public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProtoOrBuilder> 
        getNmTokensOrBuilderList() {
      return nmTokens_;
    }
    /**
     * <code>repeated .hadoop.yarn.NMTokenProto nm_tokens = 9;</code>
     */
    @java.lang.Override
    public int getNmTokensCount() {
      return nmTokens_.size();
    }
    /**
     * <code>repeated .hadoop.yarn.NMTokenProto nm_tokens = 9;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto getNmTokens(int index) {
      return nmTokens_.get(index);
    }
    /**
     * <code>repeated .hadoop.yarn.NMTokenProto nm_tokens = 9;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProtoOrBuilder getNmTokensOrBuilder(
        int index) {
      return nmTokens_.get(index);
    }

    public static final int AM_RM_TOKEN_FIELD_NUMBER = 12;
    private org.apache.hadoop.security.proto.SecurityProtos.TokenProto amRmToken_;
    /**
     * <code>optional .hadoop.common.TokenProto am_rm_token = 12;</code>
     * @return Whether the amRmToken field is set.
     */
    @java.lang.Override
    public boolean hasAmRmToken() {
      return ((bitField0_ & 0x00000020) != 0);
    }
    /**
     * <code>optional .hadoop.common.TokenProto am_rm_token = 12;</code>
     * @return The amRmToken.
     */
    @java.lang.Override
    public org.apache.hadoop.security.proto.SecurityProtos.TokenProto getAmRmToken() {
      return amRmToken_ == null ? org.apache.hadoop.security.proto.SecurityProtos.TokenProto.getDefaultInstance() : amRmToken_;
    }
    /**
     * <code>optional .hadoop.common.TokenProto am_rm_token = 12;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder getAmRmTokenOrBuilder() {
      return amRmToken_ == null ? org.apache.hadoop.security.proto.SecurityProtos.TokenProto.getDefaultInstance() : amRmToken_;
    }

    public static final int APPLICATION_PRIORITY_FIELD_NUMBER = 13;
    private org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto applicationPriority_;
    /**
     * <code>optional .hadoop.yarn.PriorityProto application_priority = 13;</code>
     * @return Whether the applicationPriority field is set.
     */
    @java.lang.Override
    public boolean hasApplicationPriority() {
      return ((bitField0_ & 0x00000040) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.PriorityProto application_priority = 13;</code>
     * @return The applicationPriority.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto getApplicationPriority() {
      return applicationPriority_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance() : applicationPriority_;
    }
    /**
     * <code>optional .hadoop.yarn.PriorityProto application_priority = 13;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder getApplicationPriorityOrBuilder() {
      return applicationPriority_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance() : applicationPriority_;
    }

    public static final int COLLECTOR_INFO_FIELD_NUMBER = 14;
    private org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto collectorInfo_;
    /**
     * <code>optional .hadoop.yarn.CollectorInfoProto collector_info = 14;</code>
     * @return Whether the collectorInfo field is set.
     */
    @java.lang.Override
    public boolean hasCollectorInfo() {
      return ((bitField0_ & 0x00000080) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.CollectorInfoProto collector_info = 14;</code>
     * @return The collectorInfo.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto getCollectorInfo() {
      return collectorInfo_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto.getDefaultInstance() : collectorInfo_;
    }
    /**
     * <code>optional .hadoop.yarn.CollectorInfoProto collector_info = 14;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProtoOrBuilder getCollectorInfoOrBuilder() {
      return collectorInfo_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto.getDefaultInstance() : collectorInfo_;
    }

    public static final int UPDATE_ERRORS_FIELD_NUMBER = 15;
    @SuppressWarnings("serial")
    private java.util.List<org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto> updateErrors_;
    /**
     * <code>repeated .hadoop.yarn.UpdateContainerErrorProto update_errors = 15;</code>
     */
    @java.lang.Override
    public java.util.List<org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto> getUpdateErrorsList() {
      return updateErrors_;
    }
    /**
     * <code>repeated .hadoop.yarn.UpdateContainerErrorProto update_errors = 15;</code>
     */
    @java.lang.Override
    public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProtoOrBuilder> 
        getUpdateErrorsOrBuilderList() {
      return updateErrors_;
    }
    /**
     * <code>repeated .hadoop.yarn.UpdateContainerErrorProto update_errors = 15;</code>
     */
    @java.lang.Override
    public int getUpdateErrorsCount() {
      return updateErrors_.size();
    }
    /**
     * <code>repeated .hadoop.yarn.UpdateContainerErrorProto update_errors = 15;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto getUpdateErrors(int index) {
      return updateErrors_.get(index);
    }
    /**
     * <code>repeated .hadoop.yarn.UpdateContainerErrorProto update_errors = 15;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProtoOrBuilder getUpdateErrorsOrBuilder(
        int index) {
      return updateErrors_.get(index);
    }

    public static final int UPDATED_CONTAINERS_FIELD_NUMBER = 16;
    @SuppressWarnings("serial")
    private java.util.List<org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto> updatedContainers_;
    /**
     * <code>repeated .hadoop.yarn.UpdatedContainerProto updated_containers = 16;</code>
     */
    @java.lang.Override
    public java.util.List<org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto> getUpdatedContainersList() {
      return updatedContainers_;
    }
    /**
     * <code>repeated .hadoop.yarn.UpdatedContainerProto updated_containers = 16;</code>
     */
    @java.lang.Override
    public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProtoOrBuilder> 
        getUpdatedContainersOrBuilderList() {
      return updatedContainers_;
    }
    /**
     * <code>repeated .hadoop.yarn.UpdatedContainerProto updated_containers = 16;</code>
     */
    @java.lang.Override
    public int getUpdatedContainersCount() {
      return updatedContainers_.size();
    }
    /**
     * <code>repeated .hadoop.yarn.UpdatedContainerProto updated_containers = 16;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto getUpdatedContainers(int index) {
      return updatedContainers_.get(index);
    }
    /**
     * <code>repeated .hadoop.yarn.UpdatedContainerProto updated_containers = 16;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProtoOrBuilder getUpdatedContainersOrBuilder(
        int index) {
      return updatedContainers_.get(index);
    }

    public static final int CONTAINERS_FROM_PREVIOUS_ATTEMPTS_FIELD_NUMBER = 17;
    @SuppressWarnings("serial")
    private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto> containersFromPreviousAttempts_;
    /**
     * <code>repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 17;</code>
     */
    @java.lang.Override
    public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto> getContainersFromPreviousAttemptsList() {
      return containersFromPreviousAttempts_;
    }
    /**
     * <code>repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 17;</code>
     */
    @java.lang.Override
    public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ContainerProtoOrBuilder> 
        getContainersFromPreviousAttemptsOrBuilderList() {
      return containersFromPreviousAttempts_;
    }
    /**
     * <code>repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 17;</code>
     */
    @java.lang.Override
    public int getContainersFromPreviousAttemptsCount() {
      return containersFromPreviousAttempts_.size();
    }
    /**
     * <code>repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 17;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto getContainersFromPreviousAttempts(int index) {
      return containersFromPreviousAttempts_.get(index);
    }
    /**
     * <code>repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 17;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ContainerProtoOrBuilder getContainersFromPreviousAttemptsOrBuilder(
        int index) {
      return containersFromPreviousAttempts_.get(index);
    }

    public static final int REJECTED_SCHEDULING_REQUESTS_FIELD_NUMBER = 18;
    @SuppressWarnings("serial")
    private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto> rejectedSchedulingRequests_;
    /**
     * <code>repeated .hadoop.yarn.RejectedSchedulingRequestProto rejected_scheduling_requests = 18;</code>
     */
    @java.lang.Override
    public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto> getRejectedSchedulingRequestsList() {
      return rejectedSchedulingRequests_;
    }
    /**
     * <code>repeated .hadoop.yarn.RejectedSchedulingRequestProto rejected_scheduling_requests = 18;</code>
     */
    @java.lang.Override
    public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProtoOrBuilder> 
        getRejectedSchedulingRequestsOrBuilderList() {
      return rejectedSchedulingRequests_;
    }
    /**
     * <code>repeated .hadoop.yarn.RejectedSchedulingRequestProto rejected_scheduling_requests = 18;</code>
     */
    @java.lang.Override
    public int getRejectedSchedulingRequestsCount() {
      return rejectedSchedulingRequests_.size();
    }
    /**
     * <code>repeated .hadoop.yarn.RejectedSchedulingRequestProto rejected_scheduling_requests = 18;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto getRejectedSchedulingRequests(int index) {
      return rejectedSchedulingRequests_.get(index);
    }
    /**
     * <code>repeated .hadoop.yarn.RejectedSchedulingRequestProto rejected_scheduling_requests = 18;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProtoOrBuilder getRejectedSchedulingRequestsOrBuilder(
        int index) {
      return rejectedSchedulingRequests_.get(index);
    }

    public static final int ENHANCED_HEADROOM_FIELD_NUMBER = 19;
    private org.apache.hadoop.yarn.proto.YarnServiceProtos.EnhancedHeadroomProto enhancedHeadroom_;
    /**
     * <code>optional .hadoop.yarn.EnhancedHeadroomProto enhanced_headroom = 19;</code>
     * @return Whether the enhancedHeadroom field is set.
     */
    @java.lang.Override
    public boolean hasEnhancedHeadroom() {
      return ((bitField0_ & 0x00000100) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.EnhancedHeadroomProto enhanced_headroom = 19;</code>
     * @return The enhancedHeadroom.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.EnhancedHeadroomProto getEnhancedHeadroom() {
      return enhancedHeadroom_ == null ? org.apache.hadoop.yarn.proto.YarnServiceProtos.EnhancedHeadroomProto.getDefaultInstance() : enhancedHeadroom_;
    }
    /**
     * <code>optional .hadoop.yarn.EnhancedHeadroomProto enhanced_headroom = 19;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.EnhancedHeadroomProtoOrBuilder getEnhancedHeadroomOrBuilder() {
      return enhancedHeadroom_ == null ? org.apache.hadoop.yarn.proto.YarnServiceProtos.EnhancedHeadroomProto.getDefaultInstance() : enhancedHeadroom_;
    }
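    // Note: unset singular message fields (limit, preempt, am_rm_token,
    // application_priority, collector_info, enhanced_headroom) return their
    // type's default instance from the getters above, never null; use the
    // corresponding has*() methods to test presence.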

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      for (int i = 0; i < getAllocatedContainersCount(); i++) {
        if (!getAllocatedContainers(i).isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      for (int i = 0; i < getCompletedContainerStatusesCount(); i++) {
        if (!getCompletedContainerStatuses(i).isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      if (hasLimit()) {
        if (!getLimit().isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      for (int i = 0; i < getUpdatedNodesCount(); i++) {
        if (!getUpdatedNodes(i).isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      if (hasPreempt()) {
        if (!getPreempt().isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      for (int i = 0; i < getNmTokensCount(); i++) {
        if (!getNmTokens(i).isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      if (hasAmRmToken()) {
        if (!getAmRmToken().isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      if (hasCollectorInfo()) {
        if (!getCollectorInfo().isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      for (int i = 0; i < getUpdateErrorsCount(); i++) {
        if (!getUpdateErrors(i).isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      for (int i = 0; i < getUpdatedContainersCount(); i++) {
        if (!getUpdatedContainers(i).isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      for (int i = 0; i < getContainersFromPreviousAttemptsCount(); i++) {
        if (!getContainersFromPreviousAttempts(i).isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      for (int i = 0; i < getRejectedSchedulingRequestsCount(); i++) {
        if (!getRejectedSchedulingRequests(i).isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }
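    // isInitialized() memoizes its answer in memoizedIsInitialized: -1 = not yet
    // computed, 0 = a nested message is missing required fields, 1 = fully
    // initialized. Only nested messages are checked; this message declares no
    // required fields of its own.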

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeEnum(1, aMCommand_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        output.writeInt32(2, responseId_);
      }
      for (int i = 0; i < allocatedContainers_.size(); i++) {
        output.writeMessage(3, allocatedContainers_.get(i));
      }
      for (int i = 0; i < completedContainerStatuses_.size(); i++) {
        output.writeMessage(4, completedContainerStatuses_.get(i));
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        output.writeMessage(5, getLimit());
      }
      for (int i = 0; i < updatedNodes_.size(); i++) {
        output.writeMessage(6, updatedNodes_.get(i));
      }
      if (((bitField0_ & 0x00000008) != 0)) {
        output.writeInt32(7, numClusterNodes_);
      }
      if (((bitField0_ & 0x00000010) != 0)) {
        output.writeMessage(8, getPreempt());
      }
      for (int i = 0; i < nmTokens_.size(); i++) {
        output.writeMessage(9, nmTokens_.get(i));
      }
      if (((bitField0_ & 0x00000020) != 0)) {
        output.writeMessage(12, getAmRmToken());
      }
      if (((bitField0_ & 0x00000040) != 0)) {
        output.writeMessage(13, getApplicationPriority());
      }
      if (((bitField0_ & 0x00000080) != 0)) {
        output.writeMessage(14, getCollectorInfo());
      }
      for (int i = 0; i < updateErrors_.size(); i++) {
        output.writeMessage(15, updateErrors_.get(i));
      }
      for (int i = 0; i < updatedContainers_.size(); i++) {
        output.writeMessage(16, updatedContainers_.get(i));
      }
      for (int i = 0; i < containersFromPreviousAttempts_.size(); i++) {
        output.writeMessage(17, containersFromPreviousAttempts_.get(i));
      }
      for (int i = 0; i < rejectedSchedulingRequests_.size(); i++) {
        output.writeMessage(18, rejectedSchedulingRequests_.get(i));
      }
      if (((bitField0_ & 0x00000100) != 0)) {
        output.writeMessage(19, getEnhancedHeadroom());
      }
      getUnknownFields().writeTo(output);
    }
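    // writeTo emits fields in ascending field-number order (1..19; numbers 10
    // and 11 are unused in this message) and appends any unknown fields
    // preserved from parsing.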

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeEnumSize(1, aMCommand_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt32Size(2, responseId_);
      }
      for (int i = 0; i < allocatedContainers_.size(); i++) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(3, allocatedContainers_.get(i));
      }
      for (int i = 0; i < completedContainerStatuses_.size(); i++) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(4, completedContainerStatuses_.get(i));
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(5, getLimit());
      }
      for (int i = 0; i < updatedNodes_.size(); i++) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(6, updatedNodes_.get(i));
      }
      if (((bitField0_ & 0x00000008) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt32Size(7, numClusterNodes_);
      }
      if (((bitField0_ & 0x00000010) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(8, getPreempt());
      }
      for (int i = 0; i < nmTokens_.size(); i++) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(9, nmTokens_.get(i));
      }
      if (((bitField0_ & 0x00000020) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(12, getAmRmToken());
      }
      if (((bitField0_ & 0x00000040) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(13, getApplicationPriority());
      }
      if (((bitField0_ & 0x00000080) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(14, getCollectorInfo());
      }
      for (int i = 0; i < updateErrors_.size(); i++) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(15, updateErrors_.get(i));
      }
      for (int i = 0; i < updatedContainers_.size(); i++) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(16, updatedContainers_.get(i));
      }
      for (int i = 0; i < containersFromPreviousAttempts_.size(); i++) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(17, containersFromPreviousAttempts_.get(i));
      }
      for (int i = 0; i < rejectedSchedulingRequests_.size(); i++) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(18, rejectedSchedulingRequests_.get(i));
      }
      if (((bitField0_ & 0x00000100) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(19, getEnhancedHeadroom());
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }
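    // getSerializedSize caches its result in memoizedSize (-1 = not yet
    // computed) and mirrors the field order used by writeTo above.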

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto) obj;

      if (hasAMCommand() != other.hasAMCommand()) return false;
      if (hasAMCommand()) {
        if (aMCommand_ != other.aMCommand_) return false;
      }
      if (hasResponseId() != other.hasResponseId()) return false;
      if (hasResponseId()) {
        if (getResponseId()
            != other.getResponseId()) return false;
      }
      if (!getAllocatedContainersList()
          .equals(other.getAllocatedContainersList())) return false;
      if (!getCompletedContainerStatusesList()
          .equals(other.getCompletedContainerStatusesList())) return false;
      if (hasLimit() != other.hasLimit()) return false;
      if (hasLimit()) {
        if (!getLimit()
            .equals(other.getLimit())) return false;
      }
      if (!getUpdatedNodesList()
          .equals(other.getUpdatedNodesList())) return false;
      if (hasNumClusterNodes() != other.hasNumClusterNodes()) return false;
      if (hasNumClusterNodes()) {
        if (getNumClusterNodes()
            != other.getNumClusterNodes()) return false;
      }
      if (hasPreempt() != other.hasPreempt()) return false;
      if (hasPreempt()) {
        if (!getPreempt()
            .equals(other.getPreempt())) return false;
      }
      if (!getNmTokensList()
          .equals(other.getNmTokensList())) return false;
      if (hasAmRmToken() != other.hasAmRmToken()) return false;
      if (hasAmRmToken()) {
        if (!getAmRmToken()
            .equals(other.getAmRmToken())) return false;
      }
      if (hasApplicationPriority() != other.hasApplicationPriority()) return false;
      if (hasApplicationPriority()) {
        if (!getApplicationPriority()
            .equals(other.getApplicationPriority())) return false;
      }
      if (hasCollectorInfo() != other.hasCollectorInfo()) return false;
      if (hasCollectorInfo()) {
        if (!getCollectorInfo()
            .equals(other.getCollectorInfo())) return false;
      }
      if (!getUpdateErrorsList()
          .equals(other.getUpdateErrorsList())) return false;
      if (!getUpdatedContainersList()
          .equals(other.getUpdatedContainersList())) return false;
      if (!getContainersFromPreviousAttemptsList()
          .equals(other.getContainersFromPreviousAttemptsList())) return false;
      if (!getRejectedSchedulingRequestsList()
          .equals(other.getRejectedSchedulingRequestsList())) return false;
      if (hasEnhancedHeadroom() != other.hasEnhancedHeadroom()) return false;
      if (hasEnhancedHeadroom()) {
        if (!getEnhancedHeadroom()
            .equals(other.getEnhancedHeadroom())) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasAMCommand()) {
        hash = (37 * hash) + A_M_COMMAND_FIELD_NUMBER;
        hash = (53 * hash) + aMCommand_;
      }
      if (hasResponseId()) {
        hash = (37 * hash) + RESPONSE_ID_FIELD_NUMBER;
        hash = (53 * hash) + getResponseId();
      }
      if (getAllocatedContainersCount() > 0) {
        hash = (37 * hash) + ALLOCATED_CONTAINERS_FIELD_NUMBER;
        hash = (53 * hash) + getAllocatedContainersList().hashCode();
      }
      if (getCompletedContainerStatusesCount() > 0) {
        hash = (37 * hash) + COMPLETED_CONTAINER_STATUSES_FIELD_NUMBER;
        hash = (53 * hash) + getCompletedContainerStatusesList().hashCode();
      }
      if (hasLimit()) {
        hash = (37 * hash) + LIMIT_FIELD_NUMBER;
        hash = (53 * hash) + getLimit().hashCode();
      }
      if (getUpdatedNodesCount() > 0) {
        hash = (37 * hash) + UPDATED_NODES_FIELD_NUMBER;
        hash = (53 * hash) + getUpdatedNodesList().hashCode();
      }
      if (hasNumClusterNodes()) {
        hash = (37 * hash) + NUM_CLUSTER_NODES_FIELD_NUMBER;
        hash = (53 * hash) + getNumClusterNodes();
      }
      if (hasPreempt()) {
        hash = (37 * hash) + PREEMPT_FIELD_NUMBER;
        hash = (53 * hash) + getPreempt().hashCode();
      }
      if (getNmTokensCount() > 0) {
        hash = (37 * hash) + NM_TOKENS_FIELD_NUMBER;
        hash = (53 * hash) + getNmTokensList().hashCode();
      }
      if (hasAmRmToken()) {
        hash = (37 * hash) + AM_RM_TOKEN_FIELD_NUMBER;
        hash = (53 * hash) + getAmRmToken().hashCode();
      }
      if (hasApplicationPriority()) {
        hash = (37 * hash) + APPLICATION_PRIORITY_FIELD_NUMBER;
        hash = (53 * hash) + getApplicationPriority().hashCode();
      }
      if (hasCollectorInfo()) {
        hash = (37 * hash) + COLLECTOR_INFO_FIELD_NUMBER;
        hash = (53 * hash) + getCollectorInfo().hashCode();
      }
      if (getUpdateErrorsCount() > 0) {
        hash = (37 * hash) + UPDATE_ERRORS_FIELD_NUMBER;
        hash = (53 * hash) + getUpdateErrorsList().hashCode();
      }
      if (getUpdatedContainersCount() > 0) {
        hash = (37 * hash) + UPDATED_CONTAINERS_FIELD_NUMBER;
        hash = (53 * hash) + getUpdatedContainersList().hashCode();
      }
      if (getContainersFromPreviousAttemptsCount() > 0) {
        hash = (37 * hash) + CONTAINERS_FROM_PREVIOUS_ATTEMPTS_FIELD_NUMBER;
        hash = (53 * hash) + getContainersFromPreviousAttemptsList().hashCode();
      }
      if (getRejectedSchedulingRequestsCount() > 0) {
        hash = (37 * hash) + REJECTED_SCHEDULING_REQUESTS_FIELD_NUMBER;
        hash = (53 * hash) + getRejectedSchedulingRequestsList().hashCode();
      }
      if (hasEnhancedHeadroom()) {
        hash = (37 * hash) + ENHANCED_HEADROOM_FIELD_NUMBER;
        hash = (53 * hash) + getEnhancedHeadroom().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
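
    // Note: hashCode() follows protoc's prime scheme -- seed 41, then
    // (37 * hash) + FIELD_NUMBER and (53 * hash) + value for each present
    // field -- and memoizes the result. Since 0 doubles as the "not yet
    // computed" sentinel, a hash that legitimately equals 0 is simply
    // recomputed on every call; behavior is still correct.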

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
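
    // Note: the parseDelimitedFrom variants above expect a varint length
    // prefix before the message bytes (the framing produced by
    // writeDelimitedTo), so several AllocateResponseProto messages can share
    // one InputStream; the plain parseFrom variants treat the entire input
    // as a single message.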
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }
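
    // Hedged usage sketch (hand-written, not generated): round-trips a
    // response through its serialized form, showing that the parseFrom
    // overloads above all delegate to the PARSER instance defined later in
    // this class. Assumes only members of this file plus the inherited
    // toByteArray() from the GeneratedMessageV3 base class.
    private static org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto roundTripSketch(
        org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto msg)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      byte[] wire = msg.toByteArray(); // inherited base-class serialization
      return PARSER.parseFrom(wire);   // equal to msg per the equals() above
    }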

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }
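
    // Note: toBuilder() on the default instance skips the mergeFrom copy, so
    // newBuilder() is cheap. A typical immutable edit looks like
    //   resp.toBuilder().setResponseId(resp.getResponseId() + 1).build()
    // and leaves the original message untouched.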

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.AllocateResponseProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.AllocateResponseProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_AllocateResponseProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_AllocateResponseProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
          getAllocatedContainersFieldBuilder();
          getCompletedContainerStatusesFieldBuilder();
          getLimitFieldBuilder();
          getUpdatedNodesFieldBuilder();
          getPreemptFieldBuilder();
          getNmTokensFieldBuilder();
          getAmRmTokenFieldBuilder();
          getApplicationPriorityFieldBuilder();
          getCollectorInfoFieldBuilder();
          getUpdateErrorsFieldBuilder();
          getUpdatedContainersFieldBuilder();
          getContainersFromPreviousAttemptsFieldBuilder();
          getRejectedSchedulingRequestsFieldBuilder();
          getEnhancedHeadroomFieldBuilder();
        }
      }
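
      // Note: alwaysUseFieldBuilders is a protobuf test hook; when it is
      // enabled, every nested field builder above is created eagerly so the
      // builder code paths get exercised. In normal operation each
      // get*FieldBuilder() method creates its builder lazily on first use.
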
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        aMCommand_ = 1;
        responseId_ = 0;
        if (allocatedContainersBuilder_ == null) {
          allocatedContainers_ = java.util.Collections.emptyList();
        } else {
          allocatedContainers_ = null;
          allocatedContainersBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000004);
        if (completedContainerStatusesBuilder_ == null) {
          completedContainerStatuses_ = java.util.Collections.emptyList();
        } else {
          completedContainerStatuses_ = null;
          completedContainerStatusesBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000008);
        limit_ = null;
        if (limitBuilder_ != null) {
          limitBuilder_.dispose();
          limitBuilder_ = null;
        }
        if (updatedNodesBuilder_ == null) {
          updatedNodes_ = java.util.Collections.emptyList();
        } else {
          updatedNodes_ = null;
          updatedNodesBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000020);
        numClusterNodes_ = 0;
        preempt_ = null;
        if (preemptBuilder_ != null) {
          preemptBuilder_.dispose();
          preemptBuilder_ = null;
        }
        if (nmTokensBuilder_ == null) {
          nmTokens_ = java.util.Collections.emptyList();
        } else {
          nmTokens_ = null;
          nmTokensBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000100);
        amRmToken_ = null;
        if (amRmTokenBuilder_ != null) {
          amRmTokenBuilder_.dispose();
          amRmTokenBuilder_ = null;
        }
        applicationPriority_ = null;
        if (applicationPriorityBuilder_ != null) {
          applicationPriorityBuilder_.dispose();
          applicationPriorityBuilder_ = null;
        }
        collectorInfo_ = null;
        if (collectorInfoBuilder_ != null) {
          collectorInfoBuilder_.dispose();
          collectorInfoBuilder_ = null;
        }
        if (updateErrorsBuilder_ == null) {
          updateErrors_ = java.util.Collections.emptyList();
        } else {
          updateErrors_ = null;
          updateErrorsBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00001000);
        if (updatedContainersBuilder_ == null) {
          updatedContainers_ = java.util.Collections.emptyList();
        } else {
          updatedContainers_ = null;
          updatedContainersBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00002000);
        if (containersFromPreviousAttemptsBuilder_ == null) {
          containersFromPreviousAttempts_ = java.util.Collections.emptyList();
        } else {
          containersFromPreviousAttempts_ = null;
          containersFromPreviousAttemptsBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00004000);
        if (rejectedSchedulingRequestsBuilder_ == null) {
          rejectedSchedulingRequests_ = java.util.Collections.emptyList();
        } else {
          rejectedSchedulingRequests_ = null;
          rejectedSchedulingRequestsBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00008000);
        enhancedHeadroom_ = null;
        if (enhancedHeadroomBuilder_ != null) {
          enhancedHeadroomBuilder_.dispose();
          enhancedHeadroomBuilder_ = null;
        }
        return this;
      }
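
      // Note: clear() resets scalars to their proto defaults (a_m_command
      // back to raw value 1), disposes or clears every nested builder, and
      // zeroes the matching presence bits in bitField0_, leaving the Builder
      // indistinguishable from a freshly constructed one.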

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_AllocateResponseProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto(this);
        buildPartialRepeatedFields(result);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartialRepeatedFields(org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto result) {
        if (allocatedContainersBuilder_ == null) {
          if (((bitField0_ & 0x00000004) != 0)) {
            allocatedContainers_ = java.util.Collections.unmodifiableList(allocatedContainers_);
            bitField0_ = (bitField0_ & ~0x00000004);
          }
          result.allocatedContainers_ = allocatedContainers_;
        } else {
          result.allocatedContainers_ = allocatedContainersBuilder_.build();
        }
        if (completedContainerStatusesBuilder_ == null) {
          if (((bitField0_ & 0x00000008) != 0)) {
            completedContainerStatuses_ = java.util.Collections.unmodifiableList(completedContainerStatuses_);
            bitField0_ = (bitField0_ & ~0x00000008);
          }
          result.completedContainerStatuses_ = completedContainerStatuses_;
        } else {
          result.completedContainerStatuses_ = completedContainerStatusesBuilder_.build();
        }
        if (updatedNodesBuilder_ == null) {
          if (((bitField0_ & 0x00000020) != 0)) {
            updatedNodes_ = java.util.Collections.unmodifiableList(updatedNodes_);
            bitField0_ = (bitField0_ & ~0x00000020);
          }
          result.updatedNodes_ = updatedNodes_;
        } else {
          result.updatedNodes_ = updatedNodesBuilder_.build();
        }
        if (nmTokensBuilder_ == null) {
          if (((bitField0_ & 0x00000100) != 0)) {
            nmTokens_ = java.util.Collections.unmodifiableList(nmTokens_);
            bitField0_ = (bitField0_ & ~0x00000100);
          }
          result.nmTokens_ = nmTokens_;
        } else {
          result.nmTokens_ = nmTokensBuilder_.build();
        }
        if (updateErrorsBuilder_ == null) {
          if (((bitField0_ & 0x00001000) != 0)) {
            updateErrors_ = java.util.Collections.unmodifiableList(updateErrors_);
            bitField0_ = (bitField0_ & ~0x00001000);
          }
          result.updateErrors_ = updateErrors_;
        } else {
          result.updateErrors_ = updateErrorsBuilder_.build();
        }
        if (updatedContainersBuilder_ == null) {
          if (((bitField0_ & 0x00002000) != 0)) {
            updatedContainers_ = java.util.Collections.unmodifiableList(updatedContainers_);
            bitField0_ = (bitField0_ & ~0x00002000);
          }
          result.updatedContainers_ = updatedContainers_;
        } else {
          result.updatedContainers_ = updatedContainersBuilder_.build();
        }
        if (containersFromPreviousAttemptsBuilder_ == null) {
          if (((bitField0_ & 0x00004000) != 0)) {
            containersFromPreviousAttempts_ = java.util.Collections.unmodifiableList(containersFromPreviousAttempts_);
            bitField0_ = (bitField0_ & ~0x00004000);
          }
          result.containersFromPreviousAttempts_ = containersFromPreviousAttempts_;
        } else {
          result.containersFromPreviousAttempts_ = containersFromPreviousAttemptsBuilder_.build();
        }
        if (rejectedSchedulingRequestsBuilder_ == null) {
          if (((bitField0_ & 0x00008000) != 0)) {
            rejectedSchedulingRequests_ = java.util.Collections.unmodifiableList(rejectedSchedulingRequests_);
            bitField0_ = (bitField0_ & ~0x00008000);
          }
          result.rejectedSchedulingRequests_ = rejectedSchedulingRequests_;
        } else {
          result.rejectedSchedulingRequests_ = rejectedSchedulingRequestsBuilder_.build();
        }
      }
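
      // Note: when a repeated field never acquired a field builder,
      // buildPartialRepeatedFields wraps its backing ArrayList in
      // Collections.unmodifiableList and drops the mutability bit, so the
      // built message and this Builder share one list until the Builder's
      // next mutation re-copies it via ensure*IsMutable() (copy-on-write).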

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.aMCommand_ = aMCommand_;
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.responseId_ = responseId_;
          to_bitField0_ |= 0x00000002;
        }
        if (((from_bitField0_ & 0x00000010) != 0)) {
          result.limit_ = limitBuilder_ == null
              ? limit_
              : limitBuilder_.build();
          to_bitField0_ |= 0x00000004;
        }
        if (((from_bitField0_ & 0x00000040) != 0)) {
          result.numClusterNodes_ = numClusterNodes_;
          to_bitField0_ |= 0x00000008;
        }
        if (((from_bitField0_ & 0x00000080) != 0)) {
          result.preempt_ = preemptBuilder_ == null
              ? preempt_
              : preemptBuilder_.build();
          to_bitField0_ |= 0x00000010;
        }
        if (((from_bitField0_ & 0x00000200) != 0)) {
          result.amRmToken_ = amRmTokenBuilder_ == null
              ? amRmToken_
              : amRmTokenBuilder_.build();
          to_bitField0_ |= 0x00000020;
        }
        if (((from_bitField0_ & 0x00000400) != 0)) {
          result.applicationPriority_ = applicationPriorityBuilder_ == null
              ? applicationPriority_
              : applicationPriorityBuilder_.build();
          to_bitField0_ |= 0x00000040;
        }
        if (((from_bitField0_ & 0x00000800) != 0)) {
          result.collectorInfo_ = collectorInfoBuilder_ == null
              ? collectorInfo_
              : collectorInfoBuilder_.build();
          to_bitField0_ |= 0x00000080;
        }
        if (((from_bitField0_ & 0x00010000) != 0)) {
          result.enhancedHeadroom_ = enhancedHeadroomBuilder_ == null
              ? enhancedHeadroom_
              : enhancedHeadroomBuilder_.build();
          to_bitField0_ |= 0x00000100;
        }
        result.bitField0_ |= to_bitField0_;
      }
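
      // Note: builder bits and message bits are numbered differently because
      // repeated fields occupy builder bits but carry no presence bit on the
      // message: limit is builder bit 0x00000010 yet message bit 0x00000004,
      // since the repeated fields at builder bits 0x04 and 0x08 are skipped
      // on the message side.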

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto.getDefaultInstance()) return this;
        if (other.hasAMCommand()) {
          setAMCommand(other.getAMCommand());
        }
        if (other.hasResponseId()) {
          setResponseId(other.getResponseId());
        }
        if (allocatedContainersBuilder_ == null) {
          if (!other.allocatedContainers_.isEmpty()) {
            if (allocatedContainers_.isEmpty()) {
              allocatedContainers_ = other.allocatedContainers_;
              bitField0_ = (bitField0_ & ~0x00000004);
            } else {
              ensureAllocatedContainersIsMutable();
              allocatedContainers_.addAll(other.allocatedContainers_);
            }
            onChanged();
          }
        } else {
          if (!other.allocatedContainers_.isEmpty()) {
            if (allocatedContainersBuilder_.isEmpty()) {
              allocatedContainersBuilder_.dispose();
              allocatedContainersBuilder_ = null;
              allocatedContainers_ = other.allocatedContainers_;
              bitField0_ = (bitField0_ & ~0x00000004);
              allocatedContainersBuilder_ = 
                org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                   getAllocatedContainersFieldBuilder() : null;
            } else {
              allocatedContainersBuilder_.addAllMessages(other.allocatedContainers_);
            }
          }
        }
        if (completedContainerStatusesBuilder_ == null) {
          if (!other.completedContainerStatuses_.isEmpty()) {
            if (completedContainerStatuses_.isEmpty()) {
              completedContainerStatuses_ = other.completedContainerStatuses_;
              bitField0_ = (bitField0_ & ~0x00000008);
            } else {
              ensureCompletedContainerStatusesIsMutable();
              completedContainerStatuses_.addAll(other.completedContainerStatuses_);
            }
            onChanged();
          }
        } else {
          if (!other.completedContainerStatuses_.isEmpty()) {
            if (completedContainerStatusesBuilder_.isEmpty()) {
              completedContainerStatusesBuilder_.dispose();
              completedContainerStatusesBuilder_ = null;
              completedContainerStatuses_ = other.completedContainerStatuses_;
              bitField0_ = (bitField0_ & ~0x00000008);
              completedContainerStatusesBuilder_ = 
                org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                   getCompletedContainerStatusesFieldBuilder() : null;
            } else {
              completedContainerStatusesBuilder_.addAllMessages(other.completedContainerStatuses_);
            }
          }
        }
        if (other.hasLimit()) {
          mergeLimit(other.getLimit());
        }
        if (updatedNodesBuilder_ == null) {
          if (!other.updatedNodes_.isEmpty()) {
            if (updatedNodes_.isEmpty()) {
              updatedNodes_ = other.updatedNodes_;
              bitField0_ = (bitField0_ & ~0x00000020);
            } else {
              ensureUpdatedNodesIsMutable();
              updatedNodes_.addAll(other.updatedNodes_);
            }
            onChanged();
          }
        } else {
          if (!other.updatedNodes_.isEmpty()) {
            if (updatedNodesBuilder_.isEmpty()) {
              updatedNodesBuilder_.dispose();
              updatedNodesBuilder_ = null;
              updatedNodes_ = other.updatedNodes_;
              bitField0_ = (bitField0_ & ~0x00000020);
              updatedNodesBuilder_ = 
                org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                   getUpdatedNodesFieldBuilder() : null;
            } else {
              updatedNodesBuilder_.addAllMessages(other.updatedNodes_);
            }
          }
        }
        if (other.hasNumClusterNodes()) {
          setNumClusterNodes(other.getNumClusterNodes());
        }
        if (other.hasPreempt()) {
          mergePreempt(other.getPreempt());
        }
        if (nmTokensBuilder_ == null) {
          if (!other.nmTokens_.isEmpty()) {
            if (nmTokens_.isEmpty()) {
              nmTokens_ = other.nmTokens_;
              bitField0_ = (bitField0_ & ~0x00000100);
            } else {
              ensureNmTokensIsMutable();
              nmTokens_.addAll(other.nmTokens_);
            }
            onChanged();
          }
        } else {
          if (!other.nmTokens_.isEmpty()) {
            if (nmTokensBuilder_.isEmpty()) {
              nmTokensBuilder_.dispose();
              nmTokensBuilder_ = null;
              nmTokens_ = other.nmTokens_;
              bitField0_ = (bitField0_ & ~0x00000100);
              nmTokensBuilder_ = 
                org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                   getNmTokensFieldBuilder() : null;
            } else {
              nmTokensBuilder_.addAllMessages(other.nmTokens_);
            }
          }
        }
        if (other.hasAmRmToken()) {
          mergeAmRmToken(other.getAmRmToken());
        }
        if (other.hasApplicationPriority()) {
          mergeApplicationPriority(other.getApplicationPriority());
        }
        if (other.hasCollectorInfo()) {
          mergeCollectorInfo(other.getCollectorInfo());
        }
        if (updateErrorsBuilder_ == null) {
          if (!other.updateErrors_.isEmpty()) {
            if (updateErrors_.isEmpty()) {
              updateErrors_ = other.updateErrors_;
              bitField0_ = (bitField0_ & ~0x00001000);
            } else {
              ensureUpdateErrorsIsMutable();
              updateErrors_.addAll(other.updateErrors_);
            }
            onChanged();
          }
        } else {
          if (!other.updateErrors_.isEmpty()) {
            if (updateErrorsBuilder_.isEmpty()) {
              updateErrorsBuilder_.dispose();
              updateErrorsBuilder_ = null;
              updateErrors_ = other.updateErrors_;
              bitField0_ = (bitField0_ & ~0x00001000);
              updateErrorsBuilder_ = 
                org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                   getUpdateErrorsFieldBuilder() : null;
            } else {
              updateErrorsBuilder_.addAllMessages(other.updateErrors_);
            }
          }
        }
        if (updatedContainersBuilder_ == null) {
          if (!other.updatedContainers_.isEmpty()) {
            if (updatedContainers_.isEmpty()) {
              updatedContainers_ = other.updatedContainers_;
              bitField0_ = (bitField0_ & ~0x00002000);
            } else {
              ensureUpdatedContainersIsMutable();
              updatedContainers_.addAll(other.updatedContainers_);
            }
            onChanged();
          }
        } else {
          if (!other.updatedContainers_.isEmpty()) {
            if (updatedContainersBuilder_.isEmpty()) {
              updatedContainersBuilder_.dispose();
              updatedContainersBuilder_ = null;
              updatedContainers_ = other.updatedContainers_;
              bitField0_ = (bitField0_ & ~0x00002000);
              updatedContainersBuilder_ = 
                org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                   getUpdatedContainersFieldBuilder() : null;
            } else {
              updatedContainersBuilder_.addAllMessages(other.updatedContainers_);
            }
          }
        }
        if (containersFromPreviousAttemptsBuilder_ == null) {
          if (!other.containersFromPreviousAttempts_.isEmpty()) {
            if (containersFromPreviousAttempts_.isEmpty()) {
              containersFromPreviousAttempts_ = other.containersFromPreviousAttempts_;
              bitField0_ = (bitField0_ & ~0x00004000);
            } else {
              ensureContainersFromPreviousAttemptsIsMutable();
              containersFromPreviousAttempts_.addAll(other.containersFromPreviousAttempts_);
            }
            onChanged();
          }
        } else {
          if (!other.containersFromPreviousAttempts_.isEmpty()) {
            if (containersFromPreviousAttemptsBuilder_.isEmpty()) {
              containersFromPreviousAttemptsBuilder_.dispose();
              containersFromPreviousAttemptsBuilder_ = null;
              containersFromPreviousAttempts_ = other.containersFromPreviousAttempts_;
              bitField0_ = (bitField0_ & ~0x00004000);
              containersFromPreviousAttemptsBuilder_ = 
                org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                   getContainersFromPreviousAttemptsFieldBuilder() : null;
            } else {
              containersFromPreviousAttemptsBuilder_.addAllMessages(other.containersFromPreviousAttempts_);
            }
          }
        }
        if (rejectedSchedulingRequestsBuilder_ == null) {
          if (!other.rejectedSchedulingRequests_.isEmpty()) {
            if (rejectedSchedulingRequests_.isEmpty()) {
              rejectedSchedulingRequests_ = other.rejectedSchedulingRequests_;
              bitField0_ = (bitField0_ & ~0x00008000);
            } else {
              ensureRejectedSchedulingRequestsIsMutable();
              rejectedSchedulingRequests_.addAll(other.rejectedSchedulingRequests_);
            }
            onChanged();
          }
        } else {
          if (!other.rejectedSchedulingRequests_.isEmpty()) {
            if (rejectedSchedulingRequestsBuilder_.isEmpty()) {
              rejectedSchedulingRequestsBuilder_.dispose();
              rejectedSchedulingRequestsBuilder_ = null;
              rejectedSchedulingRequests_ = other.rejectedSchedulingRequests_;
              bitField0_ = (bitField0_ & ~0x00008000);
              rejectedSchedulingRequestsBuilder_ = 
                org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                   getRejectedSchedulingRequestsFieldBuilder() : null;
            } else {
              rejectedSchedulingRequestsBuilder_.addAllMessages(other.rejectedSchedulingRequests_);
            }
          }
        }
        if (other.hasEnhancedHeadroom()) {
          mergeEnhancedHeadroom(other.getEnhancedHeadroom());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }
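
      // Note: mergeFrom(other) uses standard proto2 merge semantics: set
      // scalars in other overwrite this Builder's values, singular
      // sub-messages merge field-by-field via the merge*() helpers, and
      // repeated fields are appended. When this Builder's own list is still
      // empty, other's immutable list is adopted by reference rather than
      // copied; ensure*IsMutable() re-copies it before any later mutation.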

      @java.lang.Override
      public final boolean isInitialized() {
        for (int i = 0; i < getAllocatedContainersCount(); i++) {
          if (!getAllocatedContainers(i).isInitialized()) {
            return false;
          }
        }
        for (int i = 0; i < getCompletedContainerStatusesCount(); i++) {
          if (!getCompletedContainerStatuses(i).isInitialized()) {
            return false;
          }
        }
        if (hasLimit()) {
          if (!getLimit().isInitialized()) {
            return false;
          }
        }
        for (int i = 0; i < getUpdatedNodesCount(); i++) {
          if (!getUpdatedNodes(i).isInitialized()) {
            return false;
          }
        }
        if (hasPreempt()) {
          if (!getPreempt().isInitialized()) {
            return false;
          }
        }
        for (int i = 0; i < getNmTokensCount(); i++) {
          if (!getNmTokens(i).isInitialized()) {
            return false;
          }
        }
        if (hasAmRmToken()) {
          if (!getAmRmToken().isInitialized()) {
            return false;
          }
        }
        if (hasCollectorInfo()) {
          if (!getCollectorInfo().isInitialized()) {
            return false;
          }
        }
        for (int i = 0; i < getUpdateErrorsCount(); i++) {
          if (!getUpdateErrors(i).isInitialized()) {
            return false;
          }
        }
        for (int i = 0; i < getUpdatedContainersCount(); i++) {
          if (!getUpdatedContainers(i).isInitialized()) {
            return false;
          }
        }
        for (int i = 0; i < getContainersFromPreviousAttemptsCount(); i++) {
          if (!getContainersFromPreviousAttempts(i).isInitialized()) {
            return false;
          }
        }
        for (int i = 0; i < getRejectedSchedulingRequestsCount(); i++) {
          if (!getRejectedSchedulingRequests(i).isInitialized()) {
            return false;
          }
        }
        return true;
      }
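
      // Note: AllocateResponseProto declares no required fields of its own,
      // so isInitialized() only recurses into nested messages (containers,
      // statuses, tokens, the limit, and so on) that may carry required
      // fields.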

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 8: {
                int tmpRaw = input.readEnum();
                org.apache.hadoop.yarn.proto.YarnProtos.AMCommandProto tmpValue =
                    org.apache.hadoop.yarn.proto.YarnProtos.AMCommandProto.forNumber(tmpRaw);
                if (tmpValue == null) {
                  mergeUnknownVarintField(1, tmpRaw);
                } else {
                  aMCommand_ = tmpRaw;
                  bitField0_ |= 0x00000001;
                }
                break;
              } // case 8
              case 16: {
                responseId_ = input.readInt32();
                bitField0_ |= 0x00000002;
                break;
              } // case 16
              case 26: {
                org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto m =
                    input.readMessage(
                        org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.PARSER,
                        extensionRegistry);
                if (allocatedContainersBuilder_ == null) {
                  ensureAllocatedContainersIsMutable();
                  allocatedContainers_.add(m);
                } else {
                  allocatedContainersBuilder_.addMessage(m);
                }
                break;
              } // case 26
              case 34: {
                org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto m =
                    input.readMessage(
                        org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto.PARSER,
                        extensionRegistry);
                if (completedContainerStatusesBuilder_ == null) {
                  ensureCompletedContainerStatusesIsMutable();
                  completedContainerStatuses_.add(m);
                } else {
                  completedContainerStatusesBuilder_.addMessage(m);
                }
                break;
              } // case 34
              case 42: {
                input.readMessage(
                    getLimitFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000010;
                break;
              } // case 42
              case 50: {
                org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto m =
                    input.readMessage(
                        org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto.PARSER,
                        extensionRegistry);
                if (updatedNodesBuilder_ == null) {
                  ensureUpdatedNodesIsMutable();
                  updatedNodes_.add(m);
                } else {
                  updatedNodesBuilder_.addMessage(m);
                }
                break;
              } // case 50
              case 56: {
                numClusterNodes_ = input.readInt32();
                bitField0_ |= 0x00000040;
                break;
              } // case 56
              case 66: {
                input.readMessage(
                    getPreemptFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000080;
                break;
              } // case 66
              case 74: {
                org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto m =
                    input.readMessage(
                        org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto.PARSER,
                        extensionRegistry);
                if (nmTokensBuilder_ == null) {
                  ensureNmTokensIsMutable();
                  nmTokens_.add(m);
                } else {
                  nmTokensBuilder_.addMessage(m);
                }
                break;
              } // case 74
              case 98: {
                input.readMessage(
                    getAmRmTokenFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000200;
                break;
              } // case 98
              case 106: {
                input.readMessage(
                    getApplicationPriorityFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000400;
                break;
              } // case 106
              case 114: {
                input.readMessage(
                    getCollectorInfoFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000800;
                break;
              } // case 114
              case 122: {
                org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto m =
                    input.readMessage(
                        org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto.PARSER,
                        extensionRegistry);
                if (updateErrorsBuilder_ == null) {
                  ensureUpdateErrorsIsMutable();
                  updateErrors_.add(m);
                } else {
                  updateErrorsBuilder_.addMessage(m);
                }
                break;
              } // case 122
              case 130: {
                org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto m =
                    input.readMessage(
                        org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto.PARSER,
                        extensionRegistry);
                if (updatedContainersBuilder_ == null) {
                  ensureUpdatedContainersIsMutable();
                  updatedContainers_.add(m);
                } else {
                  updatedContainersBuilder_.addMessage(m);
                }
                break;
              } // case 130
              case 138: {
                org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto m =
                    input.readMessage(
                        org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.PARSER,
                        extensionRegistry);
                if (containersFromPreviousAttemptsBuilder_ == null) {
                  ensureContainersFromPreviousAttemptsIsMutable();
                  containersFromPreviousAttempts_.add(m);
                } else {
                  containersFromPreviousAttemptsBuilder_.addMessage(m);
                }
                break;
              } // case 138
              case 146: {
                org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto m =
                    input.readMessage(
                        org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto.PARSER,
                        extensionRegistry);
                if (rejectedSchedulingRequestsBuilder_ == null) {
                  ensureRejectedSchedulingRequestsIsMutable();
                  rejectedSchedulingRequests_.add(m);
                } else {
                  rejectedSchedulingRequestsBuilder_.addMessage(m);
                }
                break;
              } // case 146
              case 154: {
                input.readMessage(
                    getEnhancedHeadroomFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00010000;
                break;
              } // case 154
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
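
      // Note: each case label in the switch above is a precomputed wire tag,
      // (field_number << 3) | wire_type: case 8 is field 1 (a_m_command) as
      // a varint, case 26 is field 3 (allocated_containers) with
      // length-delimited wire type 2, and so on through case 154 for field
      // 19 (enhanced_headroom). Unrecognized tags fall through to
      // parseUnknownField, which preserves them for getUnknownFields().
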
      private int bitField0_;

      private int aMCommand_ = 1;
      /**
       * <code>optional .hadoop.yarn.AMCommandProto a_m_command = 1;</code>
       * @return Whether the aMCommand field is set.
       */
      @java.lang.Override public boolean hasAMCommand() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.AMCommandProto a_m_command = 1;</code>
       * @return The aMCommand.
       */
      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.AMCommandProto getAMCommand() {
        org.apache.hadoop.yarn.proto.YarnProtos.AMCommandProto result = org.apache.hadoop.yarn.proto.YarnProtos.AMCommandProto.forNumber(aMCommand_);
        return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.AMCommandProto.AM_RESYNC : result;
      }
      /**
       * <code>optional .hadoop.yarn.AMCommandProto a_m_command = 1;</code>
       * @param value The aMCommand to set.
       * @return This builder for chaining.
       */
      public Builder setAMCommand(org.apache.hadoop.yarn.proto.YarnProtos.AMCommandProto value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000001;
        aMCommand_ = value.getNumber();
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.AMCommandProto a_m_command = 1;</code>
       * @return This builder for chaining.
       */
      public Builder clearAMCommand() {
        bitField0_ = (bitField0_ & ~0x00000001);
        aMCommand_ = 1;
        onChanged();
        return this;
      }
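
      // Note: the Builder keeps a_m_command as its raw int so wire values
      // with no matching AMCommandProto constant can be preserved (the
      // stream parser above routes them to unknown fields); getAMCommand()
      // falls back to AM_RESYNC only when the stored raw value has no
      // mapping.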

      private int responseId_;
      /**
       * <code>optional int32 response_id = 2;</code>
       * @return Whether the responseId field is set.
       */
      @java.lang.Override
      public boolean hasResponseId() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * <code>optional int32 response_id = 2;</code>
       * @return The responseId.
       */
      @java.lang.Override
      public int getResponseId() {
        return responseId_;
      }
      /**
       * <code>optional int32 response_id = 2;</code>
       * @param value The responseId to set.
       * @return This builder for chaining.
       */
      public Builder setResponseId(int value) {
        responseId_ = value;
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>optional int32 response_id = 2;</code>
       * @return This builder for chaining.
       */
      public Builder clearResponseId() {
        bitField0_ = (bitField0_ & ~0x00000002);
        responseId_ = 0;
        onChanged();
        return this;
      }

      private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto> allocatedContainers_ =
        java.util.Collections.emptyList();
      private void ensureAllocatedContainersIsMutable() {
        if (!((bitField0_ & 0x00000004) != 0)) {
          allocatedContainers_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto>(allocatedContainers_);
          bitField0_ |= 0x00000004;
        }
      }

      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProtoOrBuilder> allocatedContainersBuilder_;

      /**
       * <code>repeated .hadoop.yarn.ContainerProto allocated_containers = 3;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto> getAllocatedContainersList() {
        if (allocatedContainersBuilder_ == null) {
          return java.util.Collections.unmodifiableList(allocatedContainers_);
        } else {
          return allocatedContainersBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerProto allocated_containers = 3;</code>
       */
      public int getAllocatedContainersCount() {
        if (allocatedContainersBuilder_ == null) {
          return allocatedContainers_.size();
        } else {
          return allocatedContainersBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerProto allocated_containers = 3;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto getAllocatedContainers(int index) {
        if (allocatedContainersBuilder_ == null) {
          return allocatedContainers_.get(index);
        } else {
          return allocatedContainersBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerProto allocated_containers = 3;</code>
       */
      public Builder setAllocatedContainers(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto value) {
        if (allocatedContainersBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureAllocatedContainersIsMutable();
          allocatedContainers_.set(index, value);
          onChanged();
        } else {
          allocatedContainersBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerProto allocated_containers = 3;</code>
       */
      public Builder setAllocatedContainers(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder builderForValue) {
        if (allocatedContainersBuilder_ == null) {
          ensureAllocatedContainersIsMutable();
          allocatedContainers_.set(index, builderForValue.build());
          onChanged();
        } else {
          allocatedContainersBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerProto allocated_containers = 3;</code>
       */
      public Builder addAllocatedContainers(org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto value) {
        if (allocatedContainersBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureAllocatedContainersIsMutable();
          allocatedContainers_.add(value);
          onChanged();
        } else {
          allocatedContainersBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerProto allocated_containers = 3;</code>
       */
      public Builder addAllocatedContainers(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto value) {
        if (allocatedContainersBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureAllocatedContainersIsMutable();
          allocatedContainers_.add(index, value);
          onChanged();
        } else {
          allocatedContainersBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerProto allocated_containers = 3;</code>
       */
      public Builder addAllocatedContainers(
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder builderForValue) {
        if (allocatedContainersBuilder_ == null) {
          ensureAllocatedContainersIsMutable();
          allocatedContainers_.add(builderForValue.build());
          onChanged();
        } else {
          allocatedContainersBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerProto allocated_containers = 3;</code>
       */
      public Builder addAllocatedContainers(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder builderForValue) {
        if (allocatedContainersBuilder_ == null) {
          ensureAllocatedContainersIsMutable();
          allocatedContainers_.add(index, builderForValue.build());
          onChanged();
        } else {
          allocatedContainersBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerProto allocated_containers = 3;</code>
       */
      public Builder addAllAllocatedContainers(
          java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto> values) {
        if (allocatedContainersBuilder_ == null) {
          ensureAllocatedContainersIsMutable();
          org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
              values, allocatedContainers_);
          onChanged();
        } else {
          allocatedContainersBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerProto allocated_containers = 3;</code>
       */
      public Builder clearAllocatedContainers() {
        if (allocatedContainersBuilder_ == null) {
          allocatedContainers_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000004);
          onChanged();
        } else {
          allocatedContainersBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerProto allocated_containers = 3;</code>
       */
      public Builder removeAllocatedContainers(int index) {
        if (allocatedContainersBuilder_ == null) {
          ensureAllocatedContainersIsMutable();
          allocatedContainers_.remove(index);
          onChanged();
        } else {
          allocatedContainersBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerProto allocated_containers = 3;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder getAllocatedContainersBuilder(
          int index) {
        return getAllocatedContainersFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerProto allocated_containers = 3;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerProtoOrBuilder getAllocatedContainersOrBuilder(
          int index) {
        if (allocatedContainersBuilder_ == null) {
          return allocatedContainers_.get(index);
        } else {
          return allocatedContainersBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerProto allocated_containers = 3;</code>
       */
      public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ContainerProtoOrBuilder> 
           getAllocatedContainersOrBuilderList() {
        if (allocatedContainersBuilder_ != null) {
          return allocatedContainersBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(allocatedContainers_);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerProto allocated_containers = 3;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder addAllocatedContainersBuilder() {
        return getAllocatedContainersFieldBuilder().addBuilder(
            org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerProto allocated_containers = 3;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder addAllocatedContainersBuilder(
          int index) {
        return getAllocatedContainersFieldBuilder().addBuilder(
            index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerProto allocated_containers = 3;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder> 
           getAllocatedContainersBuilderList() {
        return getAllocatedContainersFieldBuilder().getBuilderList();
      }
      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProtoOrBuilder> 
          getAllocatedContainersFieldBuilder() {
        if (allocatedContainersBuilder_ == null) {
          allocatedContainersBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProtoOrBuilder>(
                  allocatedContainers_,
                  ((bitField0_ & 0x00000004) != 0),
                  getParentForChildren(),
                  isClean());
          allocatedContainers_ = null;
        }
        return allocatedContainersBuilder_;
      }
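
      // Note: the allocated_containers accessors run in two modes. Until
      // getAllocatedContainersFieldBuilder() is first invoked (for example
      // via getAllocatedContainersBuilder above), the Builder mutates the
      // plain allocatedContainers_ list; afterwards ownership moves to the
      // RepeatedFieldBuilderV3 and the list reference is nulled, which is
      // why every accessor branches on allocatedContainersBuilder_ == null.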

      private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto> completedContainerStatuses_ =
        java.util.Collections.emptyList();
      private void ensureCompletedContainerStatusesIsMutable() {
        if (!((bitField0_ & 0x00000008) != 0)) {
          completedContainerStatuses_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto>(completedContainerStatuses_);
          bitField0_ |= 0x00000008;
        }
      }

      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProtoOrBuilder> completedContainerStatusesBuilder_;

      /**
       * <code>repeated .hadoop.yarn.ContainerStatusProto completed_container_statuses = 4;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto> getCompletedContainerStatusesList() {
        if (completedContainerStatusesBuilder_ == null) {
          return java.util.Collections.unmodifiableList(completedContainerStatuses_);
        } else {
          return completedContainerStatusesBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerStatusProto completed_container_statuses = 4;</code>
       */
      public int getCompletedContainerStatusesCount() {
        if (completedContainerStatusesBuilder_ == null) {
          return completedContainerStatuses_.size();
        } else {
          return completedContainerStatusesBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerStatusProto completed_container_statuses = 4;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto getCompletedContainerStatuses(int index) {
        if (completedContainerStatusesBuilder_ == null) {
          return completedContainerStatuses_.get(index);
        } else {
          return completedContainerStatusesBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerStatusProto completed_container_statuses = 4;</code>
       */
      public Builder setCompletedContainerStatuses(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto value) {
        if (completedContainerStatusesBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureCompletedContainerStatusesIsMutable();
          completedContainerStatuses_.set(index, value);
          onChanged();
        } else {
          completedContainerStatusesBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerStatusProto completed_container_statuses = 4;</code>
       */
      public Builder setCompletedContainerStatuses(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto.Builder builderForValue) {
        if (completedContainerStatusesBuilder_ == null) {
          ensureCompletedContainerStatusesIsMutable();
          completedContainerStatuses_.set(index, builderForValue.build());
          onChanged();
        } else {
          completedContainerStatusesBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerStatusProto completed_container_statuses = 4;</code>
       */
      public Builder addCompletedContainerStatuses(org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto value) {
        if (completedContainerStatusesBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureCompletedContainerStatusesIsMutable();
          completedContainerStatuses_.add(value);
          onChanged();
        } else {
          completedContainerStatusesBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerStatusProto completed_container_statuses = 4;</code>
       */
      public Builder addCompletedContainerStatuses(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto value) {
        if (completedContainerStatusesBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureCompletedContainerStatusesIsMutable();
          completedContainerStatuses_.add(index, value);
          onChanged();
        } else {
          completedContainerStatusesBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerStatusProto completed_container_statuses = 4;</code>
       */
      public Builder addCompletedContainerStatuses(
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto.Builder builderForValue) {
        if (completedContainerStatusesBuilder_ == null) {
          ensureCompletedContainerStatusesIsMutable();
          completedContainerStatuses_.add(builderForValue.build());
          onChanged();
        } else {
          completedContainerStatusesBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerStatusProto completed_container_statuses = 4;</code>
       */
      public Builder addCompletedContainerStatuses(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto.Builder builderForValue) {
        if (completedContainerStatusesBuilder_ == null) {
          ensureCompletedContainerStatusesIsMutable();
          completedContainerStatuses_.add(index, builderForValue.build());
          onChanged();
        } else {
          completedContainerStatusesBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerStatusProto completed_container_statuses = 4;</code>
       */
      public Builder addAllCompletedContainerStatuses(
          java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto> values) {
        if (completedContainerStatusesBuilder_ == null) {
          ensureCompletedContainerStatusesIsMutable();
          org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
              values, completedContainerStatuses_);
          onChanged();
        } else {
          completedContainerStatusesBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerStatusProto completed_container_statuses = 4;</code>
       */
      public Builder clearCompletedContainerStatuses() {
        if (completedContainerStatusesBuilder_ == null) {
          completedContainerStatuses_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000008);
          onChanged();
        } else {
          completedContainerStatusesBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerStatusProto completed_container_statuses = 4;</code>
       */
      public Builder removeCompletedContainerStatuses(int index) {
        if (completedContainerStatusesBuilder_ == null) {
          ensureCompletedContainerStatusesIsMutable();
          completedContainerStatuses_.remove(index);
          onChanged();
        } else {
          completedContainerStatusesBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerStatusProto completed_container_statuses = 4;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto.Builder getCompletedContainerStatusesBuilder(
          int index) {
        return getCompletedContainerStatusesFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerStatusProto completed_container_statuses = 4;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProtoOrBuilder getCompletedContainerStatusesOrBuilder(
          int index) {
        if (completedContainerStatusesBuilder_ == null) {
          return completedContainerStatuses_.get(index);
        } else {
          return completedContainerStatusesBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerStatusProto completed_container_statuses = 4;</code>
       */
      public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProtoOrBuilder> 
           getCompletedContainerStatusesOrBuilderList() {
        if (completedContainerStatusesBuilder_ != null) {
          return completedContainerStatusesBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(completedContainerStatuses_);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerStatusProto completed_container_statuses = 4;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto.Builder addCompletedContainerStatusesBuilder() {
        return getCompletedContainerStatusesFieldBuilder().addBuilder(
            org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerStatusProto completed_container_statuses = 4;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto.Builder addCompletedContainerStatusesBuilder(
          int index) {
        return getCompletedContainerStatusesFieldBuilder().addBuilder(
            index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerStatusProto completed_container_statuses = 4;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto.Builder> 
           getCompletedContainerStatusesBuilderList() {
        return getCompletedContainerStatusesFieldBuilder().getBuilderList();
      }
      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProtoOrBuilder> 
          getCompletedContainerStatusesFieldBuilder() {
        if (completedContainerStatusesBuilder_ == null) {
          completedContainerStatusesBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProtoOrBuilder>(
                  completedContainerStatuses_,
                  ((bitField0_ & 0x00000008) != 0),
                  getParentForChildren(),
                  isClean());
          completedContainerStatuses_ = null;
        }
        return completedContainerStatusesBuilder_;
      }
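
      // Usage sketch (illustrative; same hypothetical responseBuilder as
      // above): a status can be appended as a built message or edited through
      // a nested builder obtained from the field builder.
      //
      //   responseBuilder.addCompletedContainerStatuses(statusProto);  // statusProto is hypothetical
      //   responseBuilder.addCompletedContainerStatusesBuilder()
      //       .setExitStatus(0);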

      private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto limit_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> limitBuilder_;
      /**
       * <code>optional .hadoop.yarn.ResourceProto limit = 5;</code>
       * @return Whether the limit field is set.
       */
      public boolean hasLimit() {
        return ((bitField0_ & 0x00000010) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto limit = 5;</code>
       * @return The limit.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getLimit() {
        if (limitBuilder_ == null) {
          return limit_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : limit_;
        } else {
          return limitBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto limit = 5;</code>
       */
      public Builder setLimit(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) {
        if (limitBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          limit_ = value;
        } else {
          limitBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000010;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto limit = 5;</code>
       */
      public Builder setLimit(
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder builderForValue) {
        if (limitBuilder_ == null) {
          limit_ = builderForValue.build();
        } else {
          limitBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000010;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto limit = 5;</code>
       */
      public Builder mergeLimit(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) {
        if (limitBuilder_ == null) {
          if (((bitField0_ & 0x00000010) != 0) &&
            limit_ != null &&
            limit_ != org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance()) {
            getLimitBuilder().mergeFrom(value);
          } else {
            limit_ = value;
          }
        } else {
          limitBuilder_.mergeFrom(value);
        }
        if (limit_ != null) {
          bitField0_ |= 0x00000010;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto limit = 5;</code>
       */
      public Builder clearLimit() {
        bitField0_ = (bitField0_ & ~0x00000010);
        limit_ = null;
        if (limitBuilder_ != null) {
          limitBuilder_.dispose();
          limitBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto limit = 5;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder getLimitBuilder() {
        bitField0_ |= 0x00000010;
        onChanged();
        return getLimitFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto limit = 5;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getLimitOrBuilder() {
        if (limitBuilder_ != null) {
          return limitBuilder_.getMessageOrBuilder();
        } else {
          return limit_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : limit_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto limit = 5;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> 
          getLimitFieldBuilder() {
        if (limitBuilder_ == null) {
          limitBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder>(
                  getLimit(),
                  getParentForChildren(),
                  isClean());
          limit_ = null;
        }
        return limitBuilder_;
      }
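
      // Usage sketch (illustrative): the optional limit field follows the
      // standard has/get/clear protocol; hasLimit() checks the presence bit
      // before getLimit() falls back to the default instance.
      //
      //   if (responseBuilder.hasLimit()) {
      //     long memory = responseBuilder.getLimit().getMemory();
      //   }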

      private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto> updatedNodes_ =
        java.util.Collections.emptyList();
      // Copy-on-write guard for updated_nodes: the list is copied into a
      // mutable ArrayList only once, tracked by bit 0x20 in bitField0_.
      private void ensureUpdatedNodesIsMutable() {
        if (!((bitField0_ & 0x00000020) != 0)) {
          updatedNodes_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto>(updatedNodes_);
          bitField0_ |= 0x00000020;
        }
      }

      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProtoOrBuilder> updatedNodesBuilder_;

      /**
       * <code>repeated .hadoop.yarn.NodeReportProto updated_nodes = 6;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto> getUpdatedNodesList() {
        if (updatedNodesBuilder_ == null) {
          return java.util.Collections.unmodifiableList(updatedNodes_);
        } else {
          return updatedNodesBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.NodeReportProto updated_nodes = 6;</code>
       */
      public int getUpdatedNodesCount() {
        if (updatedNodesBuilder_ == null) {
          return updatedNodes_.size();
        } else {
          return updatedNodesBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.NodeReportProto updated_nodes = 6;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto getUpdatedNodes(int index) {
        if (updatedNodesBuilder_ == null) {
          return updatedNodes_.get(index);
        } else {
          return updatedNodesBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.NodeReportProto updated_nodes = 6;</code>
       */
      public Builder setUpdatedNodes(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto value) {
        if (updatedNodesBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureUpdatedNodesIsMutable();
          updatedNodes_.set(index, value);
          onChanged();
        } else {
          updatedNodesBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeReportProto updated_nodes = 6;</code>
       */
      public Builder setUpdatedNodes(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto.Builder builderForValue) {
        if (updatedNodesBuilder_ == null) {
          ensureUpdatedNodesIsMutable();
          updatedNodes_.set(index, builderForValue.build());
          onChanged();
        } else {
          updatedNodesBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeReportProto updated_nodes = 6;</code>
       */
      public Builder addUpdatedNodes(org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto value) {
        if (updatedNodesBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureUpdatedNodesIsMutable();
          updatedNodes_.add(value);
          onChanged();
        } else {
          updatedNodesBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeReportProto updated_nodes = 6;</code>
       */
      public Builder addUpdatedNodes(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto value) {
        if (updatedNodesBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureUpdatedNodesIsMutable();
          updatedNodes_.add(index, value);
          onChanged();
        } else {
          updatedNodesBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeReportProto updated_nodes = 6;</code>
       */
      public Builder addUpdatedNodes(
          org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto.Builder builderForValue) {
        if (updatedNodesBuilder_ == null) {
          ensureUpdatedNodesIsMutable();
          updatedNodes_.add(builderForValue.build());
          onChanged();
        } else {
          updatedNodesBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeReportProto updated_nodes = 6;</code>
       */
      public Builder addUpdatedNodes(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto.Builder builderForValue) {
        if (updatedNodesBuilder_ == null) {
          ensureUpdatedNodesIsMutable();
          updatedNodes_.add(index, builderForValue.build());
          onChanged();
        } else {
          updatedNodesBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeReportProto updated_nodes = 6;</code>
       */
      public Builder addAllUpdatedNodes(
          java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto> values) {
        if (updatedNodesBuilder_ == null) {
          ensureUpdatedNodesIsMutable();
          org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
              values, updatedNodes_);
          onChanged();
        } else {
          updatedNodesBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeReportProto updated_nodes = 6;</code>
       */
      public Builder clearUpdatedNodes() {
        if (updatedNodesBuilder_ == null) {
          updatedNodes_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000020);
          onChanged();
        } else {
          updatedNodesBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeReportProto updated_nodes = 6;</code>
       */
      public Builder removeUpdatedNodes(int index) {
        if (updatedNodesBuilder_ == null) {
          ensureUpdatedNodesIsMutable();
          updatedNodes_.remove(index);
          onChanged();
        } else {
          updatedNodesBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeReportProto updated_nodes = 6;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto.Builder getUpdatedNodesBuilder(
          int index) {
        return getUpdatedNodesFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .hadoop.yarn.NodeReportProto updated_nodes = 6;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProtoOrBuilder getUpdatedNodesOrBuilder(
          int index) {
        if (updatedNodesBuilder_ == null) {
          return updatedNodes_.get(index);
        } else {
          return updatedNodesBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.NodeReportProto updated_nodes = 6;</code>
       */
      public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProtoOrBuilder> 
           getUpdatedNodesOrBuilderList() {
        if (updatedNodesBuilder_ != null) {
          return updatedNodesBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(updatedNodes_);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.NodeReportProto updated_nodes = 6;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto.Builder addUpdatedNodesBuilder() {
        return getUpdatedNodesFieldBuilder().addBuilder(
            org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.NodeReportProto updated_nodes = 6;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto.Builder addUpdatedNodesBuilder(
          int index) {
        return getUpdatedNodesFieldBuilder().addBuilder(
            index, org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.NodeReportProto updated_nodes = 6;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto.Builder> 
           getUpdatedNodesBuilderList() {
        return getUpdatedNodesFieldBuilder().getBuilderList();
      }
      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProtoOrBuilder> 
          getUpdatedNodesFieldBuilder() {
        if (updatedNodesBuilder_ == null) {
          updatedNodesBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProtoOrBuilder>(
                  updatedNodes_,
                  ((bitField0_ & 0x00000020) != 0),
                  getParentForChildren(),
                  isClean());
          updatedNodes_ = null;
        }
        return updatedNodesBuilder_;
      }
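
      // Usage sketch (illustrative): bulk-adding node reports; addAll copies
      // from any Iterable via AbstractMessageLite.Builder.addAll, exactly as
      // the generated method above does.
      //
      //   responseBuilder.addAllUpdatedNodes(nodeReports);  // nodeReports is a
      //   // hypothetical List<org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto>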

      private int numClusterNodes_ ;
      /**
       * <code>optional int32 num_cluster_nodes = 7;</code>
       * @return Whether the numClusterNodes field is set.
       */
      @java.lang.Override
      public boolean hasNumClusterNodes() {
        return ((bitField0_ & 0x00000040) != 0);
      }
      /**
       * <code>optional int32 num_cluster_nodes = 7;</code>
       * @return The numClusterNodes.
       */
      @java.lang.Override
      public int getNumClusterNodes() {
        return numClusterNodes_;
      }
      /**
       * <code>optional int32 num_cluster_nodes = 7;</code>
       * @param value The numClusterNodes to set.
       * @return This builder for chaining.
       */
      public Builder setNumClusterNodes(int value) {
        numClusterNodes_ = value;
        bitField0_ |= 0x00000040;
        onChanged();
        return this;
      }
      /**
       * <code>optional int32 num_cluster_nodes = 7;</code>
       * @return This builder for chaining.
       */
      public Builder clearNumClusterNodes() {
        bitField0_ = (bitField0_ & ~0x00000040);
        numClusterNodes_ = 0;
        onChanged();
        return this;
      }
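
      // Usage sketch (illustrative): scalar fields keep no nested builder;
      // the setter records the value and raises bit 0x40, and clear resets
      // both.
      //
      //   responseBuilder.setNumClusterNodes(42);
      //   responseBuilder.clearNumClusterNodes();  // back to 0, bit cleared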

      private org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto preempt_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProtoOrBuilder> preemptBuilder_;
      /**
       * <code>optional .hadoop.yarn.PreemptionMessageProto preempt = 8;</code>
       * @return Whether the preempt field is set.
       */
      public boolean hasPreempt() {
        return ((bitField0_ & 0x00000080) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.PreemptionMessageProto preempt = 8;</code>
       * @return The preempt.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto getPreempt() {
        if (preemptBuilder_ == null) {
          return preempt_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto.getDefaultInstance() : preempt_;
        } else {
          return preemptBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.PreemptionMessageProto preempt = 8;</code>
       */
      public Builder setPreempt(org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto value) {
        if (preemptBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          preempt_ = value;
        } else {
          preemptBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000080;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.PreemptionMessageProto preempt = 8;</code>
       */
      public Builder setPreempt(
          org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto.Builder builderForValue) {
        if (preemptBuilder_ == null) {
          preempt_ = builderForValue.build();
        } else {
          preemptBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000080;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.PreemptionMessageProto preempt = 8;</code>
       */
      public Builder mergePreempt(org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto value) {
        if (preemptBuilder_ == null) {
          if (((bitField0_ & 0x00000080) != 0) &&
            preempt_ != null &&
            preempt_ != org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto.getDefaultInstance()) {
            getPreemptBuilder().mergeFrom(value);
          } else {
            preempt_ = value;
          }
        } else {
          preemptBuilder_.mergeFrom(value);
        }
        if (preempt_ != null) {
          bitField0_ |= 0x00000080;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.PreemptionMessageProto preempt = 8;</code>
       */
      public Builder clearPreempt() {
        bitField0_ = (bitField0_ & ~0x00000080);
        preempt_ = null;
        if (preemptBuilder_ != null) {
          preemptBuilder_.dispose();
          preemptBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.PreemptionMessageProto preempt = 8;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto.Builder getPreemptBuilder() {
        bitField0_ |= 0x00000080;
        onChanged();
        return getPreemptFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.PreemptionMessageProto preempt = 8;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProtoOrBuilder getPreemptOrBuilder() {
        if (preemptBuilder_ != null) {
          return preemptBuilder_.getMessageOrBuilder();
        } else {
          return preempt_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto.getDefaultInstance() : preempt_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.PreemptionMessageProto preempt = 8;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProtoOrBuilder> 
          getPreemptFieldBuilder() {
        if (preemptBuilder_ == null) {
          preemptBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProtoOrBuilder>(
                  getPreempt(),
                  getParentForChildren(),
                  isClean());
          preempt_ = null;
        }
        return preemptBuilder_;
      }
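
      // Usage sketch (illustrative): mergePreempt folds an incoming message
      // into an already-set value instead of overwriting it, so successive
      // merges accumulate set sub-fields.
      //
      //   responseBuilder.mergePreempt(partialPreemptionMessage);  // hypothetical value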

      private java.util.List<org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto> nmTokens_ =
        java.util.Collections.emptyList();
      // Copy-on-write guard for nm_tokens, mirroring the other repeated
      // fields (the mutable-copy state is tracked by bit 0x100).
      private void ensureNmTokensIsMutable() {
        if (!((bitField0_ & 0x00000100) != 0)) {
          nmTokens_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto>(nmTokens_);
          bitField0_ |= 0x00000100;
        }
      }

      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto, org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto.Builder, org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProtoOrBuilder> nmTokensBuilder_;

      /**
       * <code>repeated .hadoop.yarn.NMTokenProto nm_tokens = 9;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto> getNmTokensList() {
        if (nmTokensBuilder_ == null) {
          return java.util.Collections.unmodifiableList(nmTokens_);
        } else {
          return nmTokensBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.NMTokenProto nm_tokens = 9;</code>
       */
      public int getNmTokensCount() {
        if (nmTokensBuilder_ == null) {
          return nmTokens_.size();
        } else {
          return nmTokensBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.NMTokenProto nm_tokens = 9;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto getNmTokens(int index) {
        if (nmTokensBuilder_ == null) {
          return nmTokens_.get(index);
        } else {
          return nmTokensBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.NMTokenProto nm_tokens = 9;</code>
       */
      public Builder setNmTokens(
          int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto value) {
        if (nmTokensBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureNmTokensIsMutable();
          nmTokens_.set(index, value);
          onChanged();
        } else {
          nmTokensBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NMTokenProto nm_tokens = 9;</code>
       */
      public Builder setNmTokens(
          int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto.Builder builderForValue) {
        if (nmTokensBuilder_ == null) {
          ensureNmTokensIsMutable();
          nmTokens_.set(index, builderForValue.build());
          onChanged();
        } else {
          nmTokensBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NMTokenProto nm_tokens = 9;</code>
       */
      public Builder addNmTokens(org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto value) {
        if (nmTokensBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureNmTokensIsMutable();
          nmTokens_.add(value);
          onChanged();
        } else {
          nmTokensBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NMTokenProto nm_tokens = 9;</code>
       */
      public Builder addNmTokens(
          int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto value) {
        if (nmTokensBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureNmTokensIsMutable();
          nmTokens_.add(index, value);
          onChanged();
        } else {
          nmTokensBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NMTokenProto nm_tokens = 9;</code>
       */
      public Builder addNmTokens(
          org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto.Builder builderForValue) {
        if (nmTokensBuilder_ == null) {
          ensureNmTokensIsMutable();
          nmTokens_.add(builderForValue.build());
          onChanged();
        } else {
          nmTokensBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NMTokenProto nm_tokens = 9;</code>
       */
      public Builder addNmTokens(
          int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto.Builder builderForValue) {
        if (nmTokensBuilder_ == null) {
          ensureNmTokensIsMutable();
          nmTokens_.add(index, builderForValue.build());
          onChanged();
        } else {
          nmTokensBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NMTokenProto nm_tokens = 9;</code>
       */
      public Builder addAllNmTokens(
          java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto> values) {
        if (nmTokensBuilder_ == null) {
          ensureNmTokensIsMutable();
          org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
              values, nmTokens_);
          onChanged();
        } else {
          nmTokensBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NMTokenProto nm_tokens = 9;</code>
       */
      public Builder clearNmTokens() {
        if (nmTokensBuilder_ == null) {
          nmTokens_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000100);
          onChanged();
        } else {
          nmTokensBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NMTokenProto nm_tokens = 9;</code>
       */
      public Builder removeNmTokens(int index) {
        if (nmTokensBuilder_ == null) {
          ensureNmTokensIsMutable();
          nmTokens_.remove(index);
          onChanged();
        } else {
          nmTokensBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NMTokenProto nm_tokens = 9;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto.Builder getNmTokensBuilder(
          int index) {
        return getNmTokensFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .hadoop.yarn.NMTokenProto nm_tokens = 9;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProtoOrBuilder getNmTokensOrBuilder(
          int index) {
        if (nmTokensBuilder_ == null) {
          return nmTokens_.get(index);
        } else {
          return nmTokensBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.NMTokenProto nm_tokens = 9;</code>
       */
      public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProtoOrBuilder> 
           getNmTokensOrBuilderList() {
        if (nmTokensBuilder_ != null) {
          return nmTokensBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(nmTokens_);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.NMTokenProto nm_tokens = 9;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto.Builder addNmTokensBuilder() {
        return getNmTokensFieldBuilder().addBuilder(
            org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.NMTokenProto nm_tokens = 9;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto.Builder addNmTokensBuilder(
          int index) {
        return getNmTokensFieldBuilder().addBuilder(
            index, org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.NMTokenProto nm_tokens = 9;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto.Builder> 
           getNmTokensBuilderList() {
        return getNmTokensFieldBuilder().getBuilderList();
      }
      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto, org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto.Builder, org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProtoOrBuilder> 
          getNmTokensFieldBuilder() {
        if (nmTokensBuilder_ == null) {
          nmTokensBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto, org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto.Builder, org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProtoOrBuilder>(
                  nmTokens_,
                  ((bitField0_ & 0x00000100) != 0),
                  getParentForChildren(),
                  isClean());
          nmTokens_ = null;
        }
        return nmTokensBuilder_;
      }
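
      // Usage sketch (illustrative): the OrBuilder list view avoids building
      // messages when a nested builder is active.
      //
      //   for (org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProtoOrBuilder t
      //       : responseBuilder.getNmTokensOrBuilderList()) {
      //     // read-only access to each NM token
      //   }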

      private org.apache.hadoop.security.proto.SecurityProtos.TokenProto amRmToken_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.security.proto.SecurityProtos.TokenProto, org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder, org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder> amRmTokenBuilder_;
      /**
       * <code>optional .hadoop.common.TokenProto am_rm_token = 12;</code>
       * @return Whether the amRmToken field is set.
       */
      public boolean hasAmRmToken() {
        return ((bitField0_ & 0x00000200) != 0);
      }
      /**
       * <code>optional .hadoop.common.TokenProto am_rm_token = 12;</code>
       * @return The amRmToken.
       */
      public org.apache.hadoop.security.proto.SecurityProtos.TokenProto getAmRmToken() {
        if (amRmTokenBuilder_ == null) {
          return amRmToken_ == null ? org.apache.hadoop.security.proto.SecurityProtos.TokenProto.getDefaultInstance() : amRmToken_;
        } else {
          return amRmTokenBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.common.TokenProto am_rm_token = 12;</code>
       */
      public Builder setAmRmToken(org.apache.hadoop.security.proto.SecurityProtos.TokenProto value) {
        if (amRmTokenBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          amRmToken_ = value;
        } else {
          amRmTokenBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000200;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.common.TokenProto am_rm_token = 12;</code>
       */
      public Builder setAmRmToken(
          org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder builderForValue) {
        if (amRmTokenBuilder_ == null) {
          amRmToken_ = builderForValue.build();
        } else {
          amRmTokenBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000200;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.common.TokenProto am_rm_token = 12;</code>
       */
      public Builder mergeAmRmToken(org.apache.hadoop.security.proto.SecurityProtos.TokenProto value) {
        if (amRmTokenBuilder_ == null) {
          if (((bitField0_ & 0x00000200) != 0) &&
            amRmToken_ != null &&
            amRmToken_ != org.apache.hadoop.security.proto.SecurityProtos.TokenProto.getDefaultInstance()) {
            getAmRmTokenBuilder().mergeFrom(value);
          } else {
            amRmToken_ = value;
          }
        } else {
          amRmTokenBuilder_.mergeFrom(value);
        }
        if (amRmToken_ != null) {
          bitField0_ |= 0x00000200;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.common.TokenProto am_rm_token = 12;</code>
       */
      public Builder clearAmRmToken() {
        bitField0_ = (bitField0_ & ~0x00000200);
        amRmToken_ = null;
        if (amRmTokenBuilder_ != null) {
          amRmTokenBuilder_.dispose();
          amRmTokenBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.common.TokenProto am_rm_token = 12;</code>
       */
      public org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder getAmRmTokenBuilder() {
        bitField0_ |= 0x00000200;
        onChanged();
        return getAmRmTokenFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.common.TokenProto am_rm_token = 12;</code>
       */
      public org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder getAmRmTokenOrBuilder() {
        if (amRmTokenBuilder_ != null) {
          return amRmTokenBuilder_.getMessageOrBuilder();
        } else {
          return amRmToken_ == null ?
              org.apache.hadoop.security.proto.SecurityProtos.TokenProto.getDefaultInstance() : amRmToken_;
        }
      }
      /**
       * <code>optional .hadoop.common.TokenProto am_rm_token = 12;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.security.proto.SecurityProtos.TokenProto, org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder, org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder> 
          getAmRmTokenFieldBuilder() {
        if (amRmTokenBuilder_ == null) {
          amRmTokenBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.security.proto.SecurityProtos.TokenProto, org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder, org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder>(
                  getAmRmToken(),
                  getParentForChildren(),
                  isClean());
          amRmToken_ = null;
        }
        return amRmTokenBuilder_;
      }
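
      // Usage sketch (illustrative): clearAmRmToken() above also disposes any
      // live SingleFieldBuilderV3, so a later set starts from a clean slate.
      //
      //   responseBuilder.setAmRmToken(tokenProto);  // hypothetical TokenProto value
      //   responseBuilder.clearAmRmToken();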

      private org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto applicationPriority_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder> applicationPriorityBuilder_;
      /**
       * <code>optional .hadoop.yarn.PriorityProto application_priority = 13;</code>
       * @return Whether the applicationPriority field is set.
       */
      public boolean hasApplicationPriority() {
        return ((bitField0_ & 0x00000400) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.PriorityProto application_priority = 13;</code>
       * @return The applicationPriority.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto getApplicationPriority() {
        if (applicationPriorityBuilder_ == null) {
          return applicationPriority_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance() : applicationPriority_;
        } else {
          return applicationPriorityBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.PriorityProto application_priority = 13;</code>
       */
      public Builder setApplicationPriority(org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto value) {
        if (applicationPriorityBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          applicationPriority_ = value;
        } else {
          applicationPriorityBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000400;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.PriorityProto application_priority = 13;</code>
       */
      public Builder setApplicationPriority(
          org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder builderForValue) {
        if (applicationPriorityBuilder_ == null) {
          applicationPriority_ = builderForValue.build();
        } else {
          applicationPriorityBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000400;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.PriorityProto application_priority = 13;</code>
       */
      public Builder mergeApplicationPriority(org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto value) {
        if (applicationPriorityBuilder_ == null) {
          if (((bitField0_ & 0x00000400) != 0) &&
            applicationPriority_ != null &&
            applicationPriority_ != org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance()) {
            getApplicationPriorityBuilder().mergeFrom(value);
          } else {
            applicationPriority_ = value;
          }
        } else {
          applicationPriorityBuilder_.mergeFrom(value);
        }
        if (applicationPriority_ != null) {
          bitField0_ |= 0x00000400;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.PriorityProto application_priority = 13;</code>
       */
      public Builder clearApplicationPriority() {
        bitField0_ = (bitField0_ & ~0x00000400);
        applicationPriority_ = null;
        if (applicationPriorityBuilder_ != null) {
          applicationPriorityBuilder_.dispose();
          applicationPriorityBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.PriorityProto application_priority = 13;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder getApplicationPriorityBuilder() {
        bitField0_ |= 0x00000400;
        onChanged();
        return getApplicationPriorityFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.PriorityProto application_priority = 13;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder getApplicationPriorityOrBuilder() {
        if (applicationPriorityBuilder_ != null) {
          return applicationPriorityBuilder_.getMessageOrBuilder();
        } else {
          return applicationPriority_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance() : applicationPriority_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.PriorityProto application_priority = 13;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder> 
          getApplicationPriorityFieldBuilder() {
        if (applicationPriorityBuilder_ == null) {
          applicationPriorityBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder>(
                  getApplicationPriority(),
                  getParentForChildren(),
                  isClean());
          applicationPriority_ = null;
        }
        return applicationPriorityBuilder_;
      }
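
      // Usage sketch (illustrative): getApplicationPriorityBuilder() marks the
      // field as set before handing out the nested builder, so edits made
      // through it survive build().
      //
      //   responseBuilder.getApplicationPriorityBuilder().setPriority(1);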

      private org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto collectorInfo_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto, org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProtoOrBuilder> collectorInfoBuilder_;
      /**
       * <code>optional .hadoop.yarn.CollectorInfoProto collector_info = 14;</code>
       * @return Whether the collectorInfo field is set.
       */
      public boolean hasCollectorInfo() {
        return ((bitField0_ & 0x00000800) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.CollectorInfoProto collector_info = 14;</code>
       * @return The collectorInfo.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto getCollectorInfo() {
        if (collectorInfoBuilder_ == null) {
          return collectorInfo_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto.getDefaultInstance() : collectorInfo_;
        } else {
          return collectorInfoBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.CollectorInfoProto collector_info = 14;</code>
       */
      public Builder setCollectorInfo(org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto value) {
        if (collectorInfoBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          collectorInfo_ = value;
        } else {
          collectorInfoBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000800;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.CollectorInfoProto collector_info = 14;</code>
       */
      public Builder setCollectorInfo(
          org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto.Builder builderForValue) {
        if (collectorInfoBuilder_ == null) {
          collectorInfo_ = builderForValue.build();
        } else {
          collectorInfoBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000800;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.CollectorInfoProto collector_info = 14;</code>
       */
      public Builder mergeCollectorInfo(org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto value) {
        if (collectorInfoBuilder_ == null) {
          if (((bitField0_ & 0x00000800) != 0) &&
            collectorInfo_ != null &&
            collectorInfo_ != org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto.getDefaultInstance()) {
            getCollectorInfoBuilder().mergeFrom(value);
          } else {
            collectorInfo_ = value;
          }
        } else {
          collectorInfoBuilder_.mergeFrom(value);
        }
        if (collectorInfo_ != null) {
          bitField0_ |= 0x00000800;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.CollectorInfoProto collector_info = 14;</code>
       */
      public Builder clearCollectorInfo() {
        bitField0_ = (bitField0_ & ~0x00000800);
        collectorInfo_ = null;
        if (collectorInfoBuilder_ != null) {
          collectorInfoBuilder_.dispose();
          collectorInfoBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.CollectorInfoProto collector_info = 14;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto.Builder getCollectorInfoBuilder() {
        bitField0_ |= 0x00000800;
        onChanged();
        return getCollectorInfoFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.CollectorInfoProto collector_info = 14;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProtoOrBuilder getCollectorInfoOrBuilder() {
        if (collectorInfoBuilder_ != null) {
          return collectorInfoBuilder_.getMessageOrBuilder();
        } else {
          return collectorInfo_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto.getDefaultInstance() : collectorInfo_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.CollectorInfoProto collector_info = 14;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto, org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProtoOrBuilder> 
          getCollectorInfoFieldBuilder() {
        if (collectorInfoBuilder_ == null) {
          collectorInfoBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto, org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProtoOrBuilder>(
                  getCollectorInfo(),
                  getParentForChildren(),
                  isClean());
          collectorInfo_ = null;
        }
        return collectorInfoBuilder_;
      }
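
      // Usage sketch (illustrative): a Builder argument is built eagerly, so
      // later changes to the passed-in builder do not affect the stored
      // message.
      //
      //   responseBuilder.setCollectorInfo(
      //       org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto.newBuilder()
      //           .setCollectorAddr("host:port"));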

      private java.util.List<org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto> updateErrors_ =
        java.util.Collections.emptyList();
      // Copy-on-write guard for update_errors (mutable-copy bit 0x1000).
      private void ensureUpdateErrorsIsMutable() {
        if (!((bitField0_ & 0x00001000) != 0)) {
          updateErrors_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto>(updateErrors_);
          bitField0_ |= 0x00001000;
        }
      }

      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto, org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto.Builder, org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProtoOrBuilder> updateErrorsBuilder_;

      /**
       * <code>repeated .hadoop.yarn.UpdateContainerErrorProto update_errors = 15;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto> getUpdateErrorsList() {
        if (updateErrorsBuilder_ == null) {
          return java.util.Collections.unmodifiableList(updateErrors_);
        } else {
          return updateErrorsBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.UpdateContainerErrorProto update_errors = 15;</code>
       */
      public int getUpdateErrorsCount() {
        if (updateErrorsBuilder_ == null) {
          return updateErrors_.size();
        } else {
          return updateErrorsBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.UpdateContainerErrorProto update_errors = 15;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto getUpdateErrors(int index) {
        if (updateErrorsBuilder_ == null) {
          return updateErrors_.get(index);
        } else {
          return updateErrorsBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.UpdateContainerErrorProto update_errors = 15;</code>
       */
      public Builder setUpdateErrors(
          int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto value) {
        if (updateErrorsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureUpdateErrorsIsMutable();
          updateErrors_.set(index, value);
          onChanged();
        } else {
          updateErrorsBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.UpdateContainerErrorProto update_errors = 15;</code>
       */
      public Builder setUpdateErrors(
          int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto.Builder builderForValue) {
        if (updateErrorsBuilder_ == null) {
          ensureUpdateErrorsIsMutable();
          updateErrors_.set(index, builderForValue.build());
          onChanged();
        } else {
          updateErrorsBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.UpdateContainerErrorProto update_errors = 15;</code>
       */
      public Builder addUpdateErrors(org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto value) {
        if (updateErrorsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureUpdateErrorsIsMutable();
          updateErrors_.add(value);
          onChanged();
        } else {
          updateErrorsBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.UpdateContainerErrorProto update_errors = 15;</code>
       */
      public Builder addUpdateErrors(
          int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto value) {
        if (updateErrorsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureUpdateErrorsIsMutable();
          updateErrors_.add(index, value);
          onChanged();
        } else {
          updateErrorsBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.UpdateContainerErrorProto update_errors = 15;</code>
       */
      public Builder addUpdateErrors(
          org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto.Builder builderForValue) {
        if (updateErrorsBuilder_ == null) {
          ensureUpdateErrorsIsMutable();
          updateErrors_.add(builderForValue.build());
          onChanged();
        } else {
          updateErrorsBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.UpdateContainerErrorProto update_errors = 15;</code>
       */
      public Builder addUpdateErrors(
          int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto.Builder builderForValue) {
        if (updateErrorsBuilder_ == null) {
          ensureUpdateErrorsIsMutable();
          updateErrors_.add(index, builderForValue.build());
          onChanged();
        } else {
          updateErrorsBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.UpdateContainerErrorProto update_errors = 15;</code>
       */
      public Builder addAllUpdateErrors(
          java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto> values) {
        if (updateErrorsBuilder_ == null) {
          ensureUpdateErrorsIsMutable();
          org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
              values, updateErrors_);
          onChanged();
        } else {
          updateErrorsBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.UpdateContainerErrorProto update_errors = 15;</code>
       */
      public Builder clearUpdateErrors() {
        if (updateErrorsBuilder_ == null) {
          updateErrors_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00001000);
          onChanged();
        } else {
          updateErrorsBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.UpdateContainerErrorProto update_errors = 15;</code>
       */
      public Builder removeUpdateErrors(int index) {
        if (updateErrorsBuilder_ == null) {
          ensureUpdateErrorsIsMutable();
          updateErrors_.remove(index);
          onChanged();
        } else {
          updateErrorsBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.UpdateContainerErrorProto update_errors = 15;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto.Builder getUpdateErrorsBuilder(
          int index) {
        return getUpdateErrorsFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .hadoop.yarn.UpdateContainerErrorProto update_errors = 15;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProtoOrBuilder getUpdateErrorsOrBuilder(
          int index) {
        if (updateErrorsBuilder_ == null) {
          return updateErrors_.get(index);
        } else {
          return updateErrorsBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.UpdateContainerErrorProto update_errors = 15;</code>
       */
      public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProtoOrBuilder> 
           getUpdateErrorsOrBuilderList() {
        if (updateErrorsBuilder_ != null) {
          return updateErrorsBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(updateErrors_);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.UpdateContainerErrorProto update_errors = 15;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto.Builder addUpdateErrorsBuilder() {
        return getUpdateErrorsFieldBuilder().addBuilder(
            org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.UpdateContainerErrorProto update_errors = 15;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto.Builder addUpdateErrorsBuilder(
          int index) {
        return getUpdateErrorsFieldBuilder().addBuilder(
            index, org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.UpdateContainerErrorProto update_errors = 15;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto.Builder> 
           getUpdateErrorsBuilderList() {
        return getUpdateErrorsFieldBuilder().getBuilderList();
      }
      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto, org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto.Builder, org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProtoOrBuilder> 
          getUpdateErrorsFieldBuilder() {
        if (updateErrorsBuilder_ == null) {
          updateErrorsBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto, org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto.Builder, org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProtoOrBuilder>(
                  updateErrors_,
                  ((bitField0_ & 0x00001000) != 0),
                  getParentForChildren(),
                  isClean());
          updateErrors_ = null;
        }
        return updateErrorsBuilder_;
      }
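
      // Editor's note -- an illustrative sketch (not generated output) of the
      // repeated-field builder API above, using only methods defined in this
      // file. The same family of calls exists for updated_containers,
      // containers_from_previous_attempts and rejected_scheduling_requests;
      // "b" is a hypothetical builder name:
      //
      //   AllocateResponseProto.Builder b = AllocateResponseProto.newBuilder();
      //   b.addUpdateErrors(UpdateContainerErrorProto.getDefaultInstance());
      //   b.getUpdateErrorsCount();   // now 1
      //   b.getUpdateErrorsList();    // unmodifiable view of the backing list
      //   AllocateResponseProto response = b.build();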

      private java.util.List<org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto> updatedContainers_ =
        java.util.Collections.emptyList();
      private void ensureUpdatedContainersIsMutable() {
        if (!((bitField0_ & 0x00002000) != 0)) {
          updatedContainers_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto>(updatedContainers_);
          bitField0_ |= 0x00002000;
        }
      }

      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto, org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto.Builder, org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProtoOrBuilder> updatedContainersBuilder_;

      /**
       * <code>repeated .hadoop.yarn.UpdatedContainerProto updated_containers = 16;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto> getUpdatedContainersList() {
        if (updatedContainersBuilder_ == null) {
          return java.util.Collections.unmodifiableList(updatedContainers_);
        } else {
          return updatedContainersBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.UpdatedContainerProto updated_containers = 16;</code>
       */
      public int getUpdatedContainersCount() {
        if (updatedContainersBuilder_ == null) {
          return updatedContainers_.size();
        } else {
          return updatedContainersBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.UpdatedContainerProto updated_containers = 16;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto getUpdatedContainers(int index) {
        if (updatedContainersBuilder_ == null) {
          return updatedContainers_.get(index);
        } else {
          return updatedContainersBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.UpdatedContainerProto updated_containers = 16;</code>
       */
      public Builder setUpdatedContainers(
          int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto value) {
        if (updatedContainersBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureUpdatedContainersIsMutable();
          updatedContainers_.set(index, value);
          onChanged();
        } else {
          updatedContainersBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.UpdatedContainerProto updated_containers = 16;</code>
       */
      public Builder setUpdatedContainers(
          int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto.Builder builderForValue) {
        if (updatedContainersBuilder_ == null) {
          ensureUpdatedContainersIsMutable();
          updatedContainers_.set(index, builderForValue.build());
          onChanged();
        } else {
          updatedContainersBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.UpdatedContainerProto updated_containers = 16;</code>
       */
      public Builder addUpdatedContainers(org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto value) {
        if (updatedContainersBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureUpdatedContainersIsMutable();
          updatedContainers_.add(value);
          onChanged();
        } else {
          updatedContainersBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.UpdatedContainerProto updated_containers = 16;</code>
       */
      public Builder addUpdatedContainers(
          int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto value) {
        if (updatedContainersBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureUpdatedContainersIsMutable();
          updatedContainers_.add(index, value);
          onChanged();
        } else {
          updatedContainersBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.UpdatedContainerProto updated_containers = 16;</code>
       */
      public Builder addUpdatedContainers(
          org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto.Builder builderForValue) {
        if (updatedContainersBuilder_ == null) {
          ensureUpdatedContainersIsMutable();
          updatedContainers_.add(builderForValue.build());
          onChanged();
        } else {
          updatedContainersBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.UpdatedContainerProto updated_containers = 16;</code>
       */
      public Builder addUpdatedContainers(
          int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto.Builder builderForValue) {
        if (updatedContainersBuilder_ == null) {
          ensureUpdatedContainersIsMutable();
          updatedContainers_.add(index, builderForValue.build());
          onChanged();
        } else {
          updatedContainersBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.UpdatedContainerProto updated_containers = 16;</code>
       */
      public Builder addAllUpdatedContainers(
          java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto> values) {
        if (updatedContainersBuilder_ == null) {
          ensureUpdatedContainersIsMutable();
          org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
              values, updatedContainers_);
          onChanged();
        } else {
          updatedContainersBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.UpdatedContainerProto updated_containers = 16;</code>
       */
      public Builder clearUpdatedContainers() {
        if (updatedContainersBuilder_ == null) {
          updatedContainers_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00002000);
          onChanged();
        } else {
          updatedContainersBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.UpdatedContainerProto updated_containers = 16;</code>
       */
      public Builder removeUpdatedContainers(int index) {
        if (updatedContainersBuilder_ == null) {
          ensureUpdatedContainersIsMutable();
          updatedContainers_.remove(index);
          onChanged();
        } else {
          updatedContainersBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.UpdatedContainerProto updated_containers = 16;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto.Builder getUpdatedContainersBuilder(
          int index) {
        return getUpdatedContainersFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .hadoop.yarn.UpdatedContainerProto updated_containers = 16;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProtoOrBuilder getUpdatedContainersOrBuilder(
          int index) {
        if (updatedContainersBuilder_ == null) {
          return updatedContainers_.get(index);
        } else {
          return updatedContainersBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.UpdatedContainerProto updated_containers = 16;</code>
       */
      public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProtoOrBuilder> 
           getUpdatedContainersOrBuilderList() {
        if (updatedContainersBuilder_ != null) {
          return updatedContainersBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(updatedContainers_);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.UpdatedContainerProto updated_containers = 16;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto.Builder addUpdatedContainersBuilder() {
        return getUpdatedContainersFieldBuilder().addBuilder(
            org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.UpdatedContainerProto updated_containers = 16;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto.Builder addUpdatedContainersBuilder(
          int index) {
        return getUpdatedContainersFieldBuilder().addBuilder(
            index, org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.UpdatedContainerProto updated_containers = 16;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto.Builder> 
           getUpdatedContainersBuilderList() {
        return getUpdatedContainersFieldBuilder().getBuilderList();
      }
      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto, org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto.Builder, org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProtoOrBuilder> 
          getUpdatedContainersFieldBuilder() {
        if (updatedContainersBuilder_ == null) {
          updatedContainersBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto, org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto.Builder, org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProtoOrBuilder>(
                  updatedContainers_,
                  ((bitField0_ & 0x00002000) != 0),
                  getParentForChildren(),
                  isClean());
          updatedContainers_ = null;
        }
        return updatedContainersBuilder_;
      }
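
      // Editor's note -- sketch of in-place editing through the nested-builder
      // view ("b" is the hypothetical builder from the update_errors sketch):
      // addUpdatedContainersBuilder() switches from the plain backing list to
      // updatedContainersBuilder_, after which element builders write through
      // to this Builder:
      //
      //   UpdatedContainerProto.Builder uc = b.addUpdatedContainersBuilder();
      //   // ... mutate uc; the change is visible via b.getUpdatedContainers(0)
      //   b.getUpdatedContainersBuilderList();  // live builder views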

      private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto> containersFromPreviousAttempts_ =
        java.util.Collections.emptyList();
      private void ensureContainersFromPreviousAttemptsIsMutable() {
        if (!((bitField0_ & 0x00004000) != 0)) {
          containersFromPreviousAttempts_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto>(containersFromPreviousAttempts_);
          bitField0_ |= 0x00004000;
        }
      }

      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProtoOrBuilder> containersFromPreviousAttemptsBuilder_;

      /**
       * <code>repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 17;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto> getContainersFromPreviousAttemptsList() {
        if (containersFromPreviousAttemptsBuilder_ == null) {
          return java.util.Collections.unmodifiableList(containersFromPreviousAttempts_);
        } else {
          return containersFromPreviousAttemptsBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 17;</code>
       */
      public int getContainersFromPreviousAttemptsCount() {
        if (containersFromPreviousAttemptsBuilder_ == null) {
          return containersFromPreviousAttempts_.size();
        } else {
          return containersFromPreviousAttemptsBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 17;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto getContainersFromPreviousAttempts(int index) {
        if (containersFromPreviousAttemptsBuilder_ == null) {
          return containersFromPreviousAttempts_.get(index);
        } else {
          return containersFromPreviousAttemptsBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 17;</code>
       */
      public Builder setContainersFromPreviousAttempts(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto value) {
        if (containersFromPreviousAttemptsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureContainersFromPreviousAttemptsIsMutable();
          containersFromPreviousAttempts_.set(index, value);
          onChanged();
        } else {
          containersFromPreviousAttemptsBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 17;</code>
       */
      public Builder setContainersFromPreviousAttempts(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder builderForValue) {
        if (containersFromPreviousAttemptsBuilder_ == null) {
          ensureContainersFromPreviousAttemptsIsMutable();
          containersFromPreviousAttempts_.set(index, builderForValue.build());
          onChanged();
        } else {
          containersFromPreviousAttemptsBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 17;</code>
       */
      public Builder addContainersFromPreviousAttempts(org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto value) {
        if (containersFromPreviousAttemptsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureContainersFromPreviousAttemptsIsMutable();
          containersFromPreviousAttempts_.add(value);
          onChanged();
        } else {
          containersFromPreviousAttemptsBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 17;</code>
       */
      public Builder addContainersFromPreviousAttempts(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto value) {
        if (containersFromPreviousAttemptsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureContainersFromPreviousAttemptsIsMutable();
          containersFromPreviousAttempts_.add(index, value);
          onChanged();
        } else {
          containersFromPreviousAttemptsBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 17;</code>
       */
      public Builder addContainersFromPreviousAttempts(
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder builderForValue) {
        if (containersFromPreviousAttemptsBuilder_ == null) {
          ensureContainersFromPreviousAttemptsIsMutable();
          containersFromPreviousAttempts_.add(builderForValue.build());
          onChanged();
        } else {
          containersFromPreviousAttemptsBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 17;</code>
       */
      public Builder addContainersFromPreviousAttempts(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder builderForValue) {
        if (containersFromPreviousAttemptsBuilder_ == null) {
          ensureContainersFromPreviousAttemptsIsMutable();
          containersFromPreviousAttempts_.add(index, builderForValue.build());
          onChanged();
        } else {
          containersFromPreviousAttemptsBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 17;</code>
       */
      public Builder addAllContainersFromPreviousAttempts(
          java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto> values) {
        if (containersFromPreviousAttemptsBuilder_ == null) {
          ensureContainersFromPreviousAttemptsIsMutable();
          org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
              values, containersFromPreviousAttempts_);
          onChanged();
        } else {
          containersFromPreviousAttemptsBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 17;</code>
       */
      public Builder clearContainersFromPreviousAttempts() {
        if (containersFromPreviousAttemptsBuilder_ == null) {
          containersFromPreviousAttempts_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00004000);
          onChanged();
        } else {
          containersFromPreviousAttemptsBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 17;</code>
       */
      public Builder removeContainersFromPreviousAttempts(int index) {
        if (containersFromPreviousAttemptsBuilder_ == null) {
          ensureContainersFromPreviousAttemptsIsMutable();
          containersFromPreviousAttempts_.remove(index);
          onChanged();
        } else {
          containersFromPreviousAttemptsBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 17;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder getContainersFromPreviousAttemptsBuilder(
          int index) {
        return getContainersFromPreviousAttemptsFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 17;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerProtoOrBuilder getContainersFromPreviousAttemptsOrBuilder(
          int index) {
        if (containersFromPreviousAttemptsBuilder_ == null) {
          return containersFromPreviousAttempts_.get(index);
        } else {
          return containersFromPreviousAttemptsBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 17;</code>
       */
      public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ContainerProtoOrBuilder> 
           getContainersFromPreviousAttemptsOrBuilderList() {
        if (containersFromPreviousAttemptsBuilder_ != null) {
          return containersFromPreviousAttemptsBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(containersFromPreviousAttempts_);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 17;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder addContainersFromPreviousAttemptsBuilder() {
        return getContainersFromPreviousAttemptsFieldBuilder().addBuilder(
            org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 17;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder addContainersFromPreviousAttemptsBuilder(
          int index) {
        return getContainersFromPreviousAttemptsFieldBuilder().addBuilder(
            index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 17;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder> 
           getContainersFromPreviousAttemptsBuilderList() {
        return getContainersFromPreviousAttemptsFieldBuilder().getBuilderList();
      }
      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProtoOrBuilder> 
          getContainersFromPreviousAttemptsFieldBuilder() {
        if (containersFromPreviousAttemptsBuilder_ == null) {
          containersFromPreviousAttemptsBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProtoOrBuilder>(
                  containersFromPreviousAttempts_,
                  ((bitField0_ & 0x00004000) != 0),
                  getParentForChildren(),
                  isClean());
          containersFromPreviousAttempts_ = null;
        }
        return containersFromPreviousAttemptsBuilder_;
      }
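
      // Editor's note -- sketch of bulk insertion: addAll copies the Iterable
      // into the copy-on-write backing list while no field builder exists, and
      // delegates to addAllMessages() afterwards ("b" as in the sketches above):
      //
      //   java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto> prev =
      //       java.util.Arrays.asList(
      //           org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.getDefaultInstance());
      //   b.addAllContainersFromPreviousAttempts(prev);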

      private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto> rejectedSchedulingRequests_ =
        java.util.Collections.emptyList();
      private void ensureRejectedSchedulingRequestsIsMutable() {
        if (!((bitField0_ & 0x00008000) != 0)) {
          rejectedSchedulingRequests_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto>(rejectedSchedulingRequests_);
          bitField0_ |= 0x00008000;
        }
      }

      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto, org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProtoOrBuilder> rejectedSchedulingRequestsBuilder_;

      /**
       * <code>repeated .hadoop.yarn.RejectedSchedulingRequestProto rejected_scheduling_requests = 18;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto> getRejectedSchedulingRequestsList() {
        if (rejectedSchedulingRequestsBuilder_ == null) {
          return java.util.Collections.unmodifiableList(rejectedSchedulingRequests_);
        } else {
          return rejectedSchedulingRequestsBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.RejectedSchedulingRequestProto rejected_scheduling_requests = 18;</code>
       */
      public int getRejectedSchedulingRequestsCount() {
        if (rejectedSchedulingRequestsBuilder_ == null) {
          return rejectedSchedulingRequests_.size();
        } else {
          return rejectedSchedulingRequestsBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.RejectedSchedulingRequestProto rejected_scheduling_requests = 18;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto getRejectedSchedulingRequests(int index) {
        if (rejectedSchedulingRequestsBuilder_ == null) {
          return rejectedSchedulingRequests_.get(index);
        } else {
          return rejectedSchedulingRequestsBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.RejectedSchedulingRequestProto rejected_scheduling_requests = 18;</code>
       */
      public Builder setRejectedSchedulingRequests(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto value) {
        if (rejectedSchedulingRequestsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureRejectedSchedulingRequestsIsMutable();
          rejectedSchedulingRequests_.set(index, value);
          onChanged();
        } else {
          rejectedSchedulingRequestsBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.RejectedSchedulingRequestProto rejected_scheduling_requests = 18;</code>
       */
      public Builder setRejectedSchedulingRequests(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto.Builder builderForValue) {
        if (rejectedSchedulingRequestsBuilder_ == null) {
          ensureRejectedSchedulingRequestsIsMutable();
          rejectedSchedulingRequests_.set(index, builderForValue.build());
          onChanged();
        } else {
          rejectedSchedulingRequestsBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.RejectedSchedulingRequestProto rejected_scheduling_requests = 18;</code>
       */
      public Builder addRejectedSchedulingRequests(org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto value) {
        if (rejectedSchedulingRequestsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureRejectedSchedulingRequestsIsMutable();
          rejectedSchedulingRequests_.add(value);
          onChanged();
        } else {
          rejectedSchedulingRequestsBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.RejectedSchedulingRequestProto rejected_scheduling_requests = 18;</code>
       */
      public Builder addRejectedSchedulingRequests(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto value) {
        if (rejectedSchedulingRequestsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureRejectedSchedulingRequestsIsMutable();
          rejectedSchedulingRequests_.add(index, value);
          onChanged();
        } else {
          rejectedSchedulingRequestsBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.RejectedSchedulingRequestProto rejected_scheduling_requests = 18;</code>
       */
      public Builder addRejectedSchedulingRequests(
          org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto.Builder builderForValue) {
        if (rejectedSchedulingRequestsBuilder_ == null) {
          ensureRejectedSchedulingRequestsIsMutable();
          rejectedSchedulingRequests_.add(builderForValue.build());
          onChanged();
        } else {
          rejectedSchedulingRequestsBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.RejectedSchedulingRequestProto rejected_scheduling_requests = 18;</code>
       */
      public Builder addRejectedSchedulingRequests(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto.Builder builderForValue) {
        if (rejectedSchedulingRequestsBuilder_ == null) {
          ensureRejectedSchedulingRequestsIsMutable();
          rejectedSchedulingRequests_.add(index, builderForValue.build());
          onChanged();
        } else {
          rejectedSchedulingRequestsBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.RejectedSchedulingRequestProto rejected_scheduling_requests = 18;</code>
       */
      public Builder addAllRejectedSchedulingRequests(
          java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto> values) {
        if (rejectedSchedulingRequestsBuilder_ == null) {
          ensureRejectedSchedulingRequestsIsMutable();
          org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
              values, rejectedSchedulingRequests_);
          onChanged();
        } else {
          rejectedSchedulingRequestsBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.RejectedSchedulingRequestProto rejected_scheduling_requests = 18;</code>
       */
      public Builder clearRejectedSchedulingRequests() {
        if (rejectedSchedulingRequestsBuilder_ == null) {
          rejectedSchedulingRequests_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00008000);
          onChanged();
        } else {
          rejectedSchedulingRequestsBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.RejectedSchedulingRequestProto rejected_scheduling_requests = 18;</code>
       */
      public Builder removeRejectedSchedulingRequests(int index) {
        if (rejectedSchedulingRequestsBuilder_ == null) {
          ensureRejectedSchedulingRequestsIsMutable();
          rejectedSchedulingRequests_.remove(index);
          onChanged();
        } else {
          rejectedSchedulingRequestsBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.RejectedSchedulingRequestProto rejected_scheduling_requests = 18;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto.Builder getRejectedSchedulingRequestsBuilder(
          int index) {
        return getRejectedSchedulingRequestsFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .hadoop.yarn.RejectedSchedulingRequestProto rejected_scheduling_requests = 18;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProtoOrBuilder getRejectedSchedulingRequestsOrBuilder(
          int index) {
        if (rejectedSchedulingRequestsBuilder_ == null) {
          return rejectedSchedulingRequests_.get(index);
        } else {
          return rejectedSchedulingRequestsBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.RejectedSchedulingRequestProto rejected_scheduling_requests = 18;</code>
       */
      public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProtoOrBuilder> 
           getRejectedSchedulingRequestsOrBuilderList() {
        if (rejectedSchedulingRequestsBuilder_ != null) {
          return rejectedSchedulingRequestsBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(rejectedSchedulingRequests_);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.RejectedSchedulingRequestProto rejected_scheduling_requests = 18;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto.Builder addRejectedSchedulingRequestsBuilder() {
        return getRejectedSchedulingRequestsFieldBuilder().addBuilder(
            org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.RejectedSchedulingRequestProto rejected_scheduling_requests = 18;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto.Builder addRejectedSchedulingRequestsBuilder(
          int index) {
        return getRejectedSchedulingRequestsFieldBuilder().addBuilder(
            index, org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.RejectedSchedulingRequestProto rejected_scheduling_requests = 18;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto.Builder> 
           getRejectedSchedulingRequestsBuilderList() {
        return getRejectedSchedulingRequestsFieldBuilder().getBuilderList();
      }
      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto, org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProtoOrBuilder> 
          getRejectedSchedulingRequestsFieldBuilder() {
        if (rejectedSchedulingRequestsBuilder_ == null) {
          rejectedSchedulingRequestsBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto, org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProtoOrBuilder>(
                  rejectedSchedulingRequests_,
                  ((bitField0_ & 0x00008000) != 0),
                  getParentForChildren(),
                  isClean());
          rejectedSchedulingRequests_ = null;
        }
        return rejectedSchedulingRequestsBuilder_;
      }
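
      // Editor's note -- removal semantics sketch: remove*() is index-based,
      // and clear*() resets the backing list to emptyList() and drops the
      // ownership bit (~0x00008000) when no field builder exists:
      //
      //   b.removeRejectedSchedulingRequests(0);   // drop one element
      //   b.clearRejectedSchedulingRequests();     // back to the empty default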

      private org.apache.hadoop.yarn.proto.YarnServiceProtos.EnhancedHeadroomProto enhancedHeadroom_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnServiceProtos.EnhancedHeadroomProto, org.apache.hadoop.yarn.proto.YarnServiceProtos.EnhancedHeadroomProto.Builder, org.apache.hadoop.yarn.proto.YarnServiceProtos.EnhancedHeadroomProtoOrBuilder> enhancedHeadroomBuilder_;
      /**
       * <code>optional .hadoop.yarn.EnhancedHeadroomProto enhanced_headroom = 19;</code>
       * @return Whether the enhancedHeadroom field is set.
       */
      public boolean hasEnhancedHeadroom() {
        return ((bitField0_ & 0x00010000) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.EnhancedHeadroomProto enhanced_headroom = 19;</code>
       * @return The enhancedHeadroom.
       */
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.EnhancedHeadroomProto getEnhancedHeadroom() {
        if (enhancedHeadroomBuilder_ == null) {
          return enhancedHeadroom_ == null ? org.apache.hadoop.yarn.proto.YarnServiceProtos.EnhancedHeadroomProto.getDefaultInstance() : enhancedHeadroom_;
        } else {
          return enhancedHeadroomBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.EnhancedHeadroomProto enhanced_headroom = 19;</code>
       */
      public Builder setEnhancedHeadroom(org.apache.hadoop.yarn.proto.YarnServiceProtos.EnhancedHeadroomProto value) {
        if (enhancedHeadroomBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          enhancedHeadroom_ = value;
        } else {
          enhancedHeadroomBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00010000;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.EnhancedHeadroomProto enhanced_headroom = 19;</code>
       */
      public Builder setEnhancedHeadroom(
          org.apache.hadoop.yarn.proto.YarnServiceProtos.EnhancedHeadroomProto.Builder builderForValue) {
        if (enhancedHeadroomBuilder_ == null) {
          enhancedHeadroom_ = builderForValue.build();
        } else {
          enhancedHeadroomBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00010000;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.EnhancedHeadroomProto enhanced_headroom = 19;</code>
       */
      public Builder mergeEnhancedHeadroom(org.apache.hadoop.yarn.proto.YarnServiceProtos.EnhancedHeadroomProto value) {
        if (enhancedHeadroomBuilder_ == null) {
          if (((bitField0_ & 0x00010000) != 0) &&
            enhancedHeadroom_ != null &&
            enhancedHeadroom_ != org.apache.hadoop.yarn.proto.YarnServiceProtos.EnhancedHeadroomProto.getDefaultInstance()) {
            getEnhancedHeadroomBuilder().mergeFrom(value);
          } else {
            enhancedHeadroom_ = value;
          }
        } else {
          enhancedHeadroomBuilder_.mergeFrom(value);
        }
        if (enhancedHeadroom_ != null) {
          bitField0_ |= 0x00010000;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.EnhancedHeadroomProto enhanced_headroom = 19;</code>
       */
      public Builder clearEnhancedHeadroom() {
        bitField0_ = (bitField0_ & ~0x00010000);
        enhancedHeadroom_ = null;
        if (enhancedHeadroomBuilder_ != null) {
          enhancedHeadroomBuilder_.dispose();
          enhancedHeadroomBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.EnhancedHeadroomProto enhanced_headroom = 19;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.EnhancedHeadroomProto.Builder getEnhancedHeadroomBuilder() {
        bitField0_ |= 0x00010000;
        onChanged();
        return getEnhancedHeadroomFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.EnhancedHeadroomProto enhanced_headroom = 19;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.EnhancedHeadroomProtoOrBuilder getEnhancedHeadroomOrBuilder() {
        if (enhancedHeadroomBuilder_ != null) {
          return enhancedHeadroomBuilder_.getMessageOrBuilder();
        } else {
          return enhancedHeadroom_ == null ?
              org.apache.hadoop.yarn.proto.YarnServiceProtos.EnhancedHeadroomProto.getDefaultInstance() : enhancedHeadroom_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.EnhancedHeadroomProto enhanced_headroom = 19;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnServiceProtos.EnhancedHeadroomProto, org.apache.hadoop.yarn.proto.YarnServiceProtos.EnhancedHeadroomProto.Builder, org.apache.hadoop.yarn.proto.YarnServiceProtos.EnhancedHeadroomProtoOrBuilder> 
          getEnhancedHeadroomFieldBuilder() {
        if (enhancedHeadroomBuilder_ == null) {
          enhancedHeadroomBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnServiceProtos.EnhancedHeadroomProto, org.apache.hadoop.yarn.proto.YarnServiceProtos.EnhancedHeadroomProto.Builder, org.apache.hadoop.yarn.proto.YarnServiceProtos.EnhancedHeadroomProtoOrBuilder>(
                  getEnhancedHeadroom(),
                  getParentForChildren(),
                  isClean());
          enhancedHeadroom_ = null;
        }
        return enhancedHeadroomBuilder_;
      }
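
      // Editor's note -- sketch of the optional singular message field above:
      // set*() replaces the value, merge*() folds a new value into an existing
      // non-default one through the nested builder, and clear*() disposes any
      // field builder ("b" as in the sketches above):
      //
      //   b.setEnhancedHeadroom(EnhancedHeadroomProto.getDefaultInstance());
      //   b.hasEnhancedHeadroom();     // true (bit 0x00010000 set)
      //   b.clearEnhancedHeadroom();   // field null again, bit cleared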
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.AllocateResponseProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.AllocateResponseProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<AllocateResponseProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<AllocateResponseProto>() {
      @java.lang.Override
      public AllocateResponseProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
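
    // Editor's note -- behavior sketch for PARSER: mergeFrom() performs the
    // wire decoding, and every failure path attaches the partially decoded
    // message via setUnfinishedMessage() before rethrowing as an
    // InvalidProtocolBufferException. "bytes" below is a hypothetical byte[]:
    //
    //   try {
    //     AllocateResponseProto msg = AllocateResponseProto.parser().parseFrom(bytes);
    //   } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
    //     e.getUnfinishedMessage();  // whatever decoded before the failure, if any
    //   }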

    public static org.apache.hadoop.thirdparty.protobuf.Parser<AllocateResponseProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<AllocateResponseProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }

  public interface GetNewApplicationRequestProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.GetNewApplicationRequestProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
  }
  /**
   * Protobuf type {@code hadoop.yarn.GetNewApplicationRequestProto}
   */
  public static final class GetNewApplicationRequestProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.GetNewApplicationRequestProto)
      GetNewApplicationRequestProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use GetNewApplicationRequestProto.newBuilder() to construct.
    private GetNewApplicationRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private GetNewApplicationRequestProto() {
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new GetNewApplicationRequestProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetNewApplicationRequestProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetNewApplicationRequestProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationRequestProto.Builder.class);
    }

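    // Tri-state cache for isInitialized(): -1 = not yet computed, 0 = known
    // false, 1 = known true. This message declares no required fields, so it
    // is always initialized.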
    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationRequestProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationRequestProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationRequestProto) obj;

      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationRequestProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationRequestProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationRequestProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationRequestProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationRequestProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationRequestProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationRequestProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationRequestProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationRequestProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.GetNewApplicationRequestProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.GetNewApplicationRequestProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationRequestProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetNewApplicationRequestProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetNewApplicationRequestProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationRequestProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationRequestProto.newBuilder()
      private Builder() {
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetNewApplicationRequestProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationRequestProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationRequestProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationRequestProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationRequestProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationRequestProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationRequestProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationRequestProto(this);
        onBuilt();
        return result;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationRequestProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationRequestProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationRequestProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationRequestProto.getDefaultInstance()) return this;
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.GetNewApplicationRequestProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.GetNewApplicationRequestProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationRequestProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationRequestProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationRequestProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<GetNewApplicationRequestProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<GetNewApplicationRequestProto>() {
      @java.lang.Override
      public GetNewApplicationRequestProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
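
    // PARSER is kept public-but-deprecated for older callers; new code is
    // expected to go through parser() or getParserForType() below, both of
    // which return this same singleton.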

    public static org.apache.hadoop.thirdparty.protobuf.Parser<GetNewApplicationRequestProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<GetNewApplicationRequestProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationRequestProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
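
  // A sketch of the full round trip for this field-less request message,
  // assuming only the methods defined above: the builder yields the default
  // instance, serialization yields zero bytes, and parsing restores an
  // equal message.
  //
  //   GetNewApplicationRequestProto req =
  //       GetNewApplicationRequestProto.newBuilder().build();
  //   byte[] bytes = req.toByteArray();   // zero-length for this message
  //   GetNewApplicationRequestProto copy =
  //       GetNewApplicationRequestProto.parseFrom(bytes);
  //   assert req.equals(copy);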

  public interface GetNewApplicationResponseProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.GetNewApplicationResponseProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>optional .hadoop.yarn.ApplicationIdProto application_id = 1;</code>
     * @return Whether the applicationId field is set.
     */
    boolean hasApplicationId();
    /**
     * <code>optional .hadoop.yarn.ApplicationIdProto application_id = 1;</code>
     * @return The applicationId.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getApplicationId();
    /**
     * <code>optional .hadoop.yarn.ApplicationIdProto application_id = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getApplicationIdOrBuilder();

    /**
     * <code>optional .hadoop.yarn.ResourceProto maximumCapability = 2;</code>
     * @return Whether the maximumCapability field is set.
     */
    boolean hasMaximumCapability();
    /**
     * <code>optional .hadoop.yarn.ResourceProto maximumCapability = 2;</code>
     * @return The maximumCapability.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getMaximumCapability();
    /**
     * <code>optional .hadoop.yarn.ResourceProto maximumCapability = 2;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getMaximumCapabilityOrBuilder();
  }
  /**
   * Protobuf type {@code hadoop.yarn.GetNewApplicationResponseProto}
   */
  public static final class GetNewApplicationResponseProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.GetNewApplicationResponseProto)
      GetNewApplicationResponseProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use GetNewApplicationResponseProto.newBuilder() to construct.
    private GetNewApplicationResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private GetNewApplicationResponseProto() {
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new GetNewApplicationResponseProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetNewApplicationResponseProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetNewApplicationResponseProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationResponseProto.Builder.class);
    }

    private int bitField0_;
    public static final int APPLICATION_ID_FIELD_NUMBER = 1;
    private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto applicationId_;
    /**
     * <code>optional .hadoop.yarn.ApplicationIdProto application_id = 1;</code>
     * @return Whether the applicationId field is set.
     */
    @java.lang.Override
    public boolean hasApplicationId() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.ApplicationIdProto application_id = 1;</code>
     * @return The applicationId.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getApplicationId() {
      return applicationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_;
    }
    /**
     * <code>optional .hadoop.yarn.ApplicationIdProto application_id = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getApplicationIdOrBuilder() {
      return applicationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_;
    }

    public static final int MAXIMUMCAPABILITY_FIELD_NUMBER = 2;
    private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto maximumCapability_;
    /**
     * <code>optional .hadoop.yarn.ResourceProto maximumCapability = 2;</code>
     * @return Whether the maximumCapability field is set.
     */
    @java.lang.Override
    public boolean hasMaximumCapability() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.ResourceProto maximumCapability = 2;</code>
     * @return The maximumCapability.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getMaximumCapability() {
      return maximumCapability_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : maximumCapability_;
    }
    /**
     * <code>optional .hadoop.yarn.ResourceProto maximumCapability = 2;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getMaximumCapabilityOrBuilder() {
      return maximumCapability_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : maximumCapability_;
    }
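
    // Both fields above are optional: get*() on an unset field returns the
    // nested type's default instance rather than null, so callers check the
    // has*() accessor first. "response" is a hypothetical variable, and the
    // ApplicationIdProto getters assumed here come from YarnProtos.
    //
    //   if (response.hasApplicationId()) {
    //     long ts = response.getApplicationId().getClusterTimestamp();
    //     int id = response.getApplicationId().getId();
    //   }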

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      if (hasMaximumCapability()) {
        if (!getMaximumCapability().isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeMessage(1, getApplicationId());
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        output.writeMessage(2, getMaximumCapability());
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, getApplicationId());
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(2, getMaximumCapability());
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationResponseProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationResponseProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationResponseProto) obj;

      if (hasApplicationId() != other.hasApplicationId()) return false;
      if (hasApplicationId()) {
        if (!getApplicationId()
            .equals(other.getApplicationId())) return false;
      }
      if (hasMaximumCapability() != other.hasMaximumCapability()) return false;
      if (hasMaximumCapability()) {
        if (!getMaximumCapability()
            .equals(other.getMaximumCapability())) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasApplicationId()) {
        hash = (37 * hash) + APPLICATION_ID_FIELD_NUMBER;
        hash = (53 * hash) + getApplicationId().hashCode();
      }
      if (hasMaximumCapability()) {
        hash = (37 * hash) + MAXIMUMCAPABILITY_FIELD_NUMBER;
        hash = (53 * hash) + getMaximumCapability().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationResponseProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationResponseProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationResponseProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationResponseProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationResponseProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationResponseProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationResponseProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationResponseProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationResponseProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.GetNewApplicationResponseProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.GetNewApplicationResponseProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationResponseProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetNewApplicationResponseProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetNewApplicationResponseProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationResponseProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationResponseProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
          getApplicationIdFieldBuilder();
          getMaximumCapabilityFieldBuilder();
        }
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        applicationId_ = null;
        if (applicationIdBuilder_ != null) {
          applicationIdBuilder_.dispose();
          applicationIdBuilder_ = null;
        }
        maximumCapability_ = null;
        if (maximumCapabilityBuilder_ != null) {
          maximumCapabilityBuilder_.dispose();
          maximumCapabilityBuilder_ = null;
        }
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetNewApplicationResponseProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationResponseProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationResponseProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationResponseProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationResponseProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationResponseProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationResponseProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationResponseProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationResponseProto result) {
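        // Copies the builder's presence bits into the built message: bit 0x1
        // tracks application_id and bit 0x2 tracks maximumCapability,
        // mirroring hasApplicationId()/hasMaximumCapability().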
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.applicationId_ = applicationIdBuilder_ == null
              ? applicationId_
              : applicationIdBuilder_.build();
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.maximumCapability_ = maximumCapabilityBuilder_ == null
              ? maximumCapability_
              : maximumCapabilityBuilder_.build();
          to_bitField0_ |= 0x00000002;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationResponseProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationResponseProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationResponseProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationResponseProto.getDefaultInstance()) return this;
        if (other.hasApplicationId()) {
          mergeApplicationId(other.getApplicationId());
        }
        if (other.hasMaximumCapability()) {
          mergeMaximumCapability(other.getMaximumCapability());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        if (hasMaximumCapability()) {
          if (!getMaximumCapability().isInitialized()) {
            return false;
          }
        }
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
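            // Each tag packs (field_number << 3) | wire_type: field 1
            // (application_id, length-delimited wire type 2) arrives as
            // tag 10, field 2 (maximumCapability) as tag 18, and 0 marks
            // end of input.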
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                input.readMessage(
                    getApplicationIdFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
              case 18: {
                input.readMessage(
                    getMaximumCapabilityFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

      private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto applicationId_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder> applicationIdBuilder_;
      /**
       * <code>optional .hadoop.yarn.ApplicationIdProto application_id = 1;</code>
       * @return Whether the applicationId field is set.
       */
      public boolean hasApplicationId() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationIdProto application_id = 1;</code>
       * @return The applicationId.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getApplicationId() {
        if (applicationIdBuilder_ == null) {
          return applicationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_;
        } else {
          return applicationIdBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationIdProto application_id = 1;</code>
       */
      public Builder setApplicationId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto value) {
        if (applicationIdBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          applicationId_ = value;
        } else {
          applicationIdBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationIdProto application_id = 1;</code>
       */
      public Builder setApplicationId(
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder builderForValue) {
        if (applicationIdBuilder_ == null) {
          applicationId_ = builderForValue.build();
        } else {
          applicationIdBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationIdProto application_id = 1;</code>
       */
      public Builder mergeApplicationId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto value) {
        if (applicationIdBuilder_ == null) {
          if (((bitField0_ & 0x00000001) != 0) &&
            applicationId_ != null &&
            applicationId_ != org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance()) {
            getApplicationIdBuilder().mergeFrom(value);
          } else {
            applicationId_ = value;
          }
        } else {
          applicationIdBuilder_.mergeFrom(value);
        }
        if (applicationId_ != null) {
          bitField0_ |= 0x00000001;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationIdProto application_id = 1;</code>
       */
      public Builder clearApplicationId() {
        bitField0_ = (bitField0_ & ~0x00000001);
        applicationId_ = null;
        if (applicationIdBuilder_ != null) {
          applicationIdBuilder_.dispose();
          applicationIdBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationIdProto application_id = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder getApplicationIdBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getApplicationIdFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationIdProto application_id = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getApplicationIdOrBuilder() {
        if (applicationIdBuilder_ != null) {
          return applicationIdBuilder_.getMessageOrBuilder();
        } else {
          return applicationId_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationIdProto application_id = 1;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder> 
          getApplicationIdFieldBuilder() {
        if (applicationIdBuilder_ == null) {
          applicationIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder>(
                  getApplicationId(),
                  getParentForChildren(),
                  isClean());
          applicationId_ = null;
        }
        return applicationIdBuilder_;
      }

      private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto maximumCapability_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> maximumCapabilityBuilder_;
      /**
       * <code>optional .hadoop.yarn.ResourceProto maximumCapability = 2;</code>
       * @return Whether the maximumCapability field is set.
       */
      public boolean hasMaximumCapability() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto maximumCapability = 2;</code>
       * @return The maximumCapability.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getMaximumCapability() {
        if (maximumCapabilityBuilder_ == null) {
          return maximumCapability_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : maximumCapability_;
        } else {
          return maximumCapabilityBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto maximumCapability = 2;</code>
       */
      public Builder setMaximumCapability(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) {
        if (maximumCapabilityBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          maximumCapability_ = value;
        } else {
          maximumCapabilityBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto maximumCapability = 2;</code>
       */
      public Builder setMaximumCapability(
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder builderForValue) {
        if (maximumCapabilityBuilder_ == null) {
          maximumCapability_ = builderForValue.build();
        } else {
          maximumCapabilityBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto maximumCapability = 2;</code>
       */
      public Builder mergeMaximumCapability(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) {
        if (maximumCapabilityBuilder_ == null) {
          if (((bitField0_ & 0x00000002) != 0) &&
            maximumCapability_ != null &&
            maximumCapability_ != org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance()) {
            getMaximumCapabilityBuilder().mergeFrom(value);
          } else {
            maximumCapability_ = value;
          }
        } else {
          maximumCapabilityBuilder_.mergeFrom(value);
        }
        if (maximumCapability_ != null) {
          bitField0_ |= 0x00000002;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto maximumCapability = 2;</code>
       */
      public Builder clearMaximumCapability() {
        bitField0_ = (bitField0_ & ~0x00000002);
        maximumCapability_ = null;
        if (maximumCapabilityBuilder_ != null) {
          maximumCapabilityBuilder_.dispose();
          maximumCapabilityBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto maximumCapability = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder getMaximumCapabilityBuilder() {
        bitField0_ |= 0x00000002;
        onChanged();
        return getMaximumCapabilityFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto maximumCapability = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getMaximumCapabilityOrBuilder() {
        if (maximumCapabilityBuilder_ != null) {
          return maximumCapabilityBuilder_.getMessageOrBuilder();
        } else {
          return maximumCapability_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : maximumCapability_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto maximumCapability = 2;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> 
          getMaximumCapabilityFieldBuilder() {
        if (maximumCapabilityBuilder_ == null) {
          maximumCapabilityBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder>(
                  getMaximumCapability(),
                  getParentForChildren(),
                  isClean());
          maximumCapability_ = null;
        }
        return maximumCapabilityBuilder_;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.GetNewApplicationResponseProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.GetNewApplicationResponseProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationResponseProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationResponseProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationResponseProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<GetNewApplicationResponseProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<GetNewApplicationResponseProto>() {
      @java.lang.Override
      public GetNewApplicationResponseProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<GetNewApplicationResponseProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<GetNewApplicationResponseProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationResponseProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
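
  // A sketch of populating this message through its Builder, assuming the
  // ApplicationIdProto builder from YarnProtos; the id value 1 is an
  // arbitrary illustration, not a default from this file.
  //
  //   GetNewApplicationResponseProto resp =
  //       GetNewApplicationResponseProto.newBuilder()
  //           .setApplicationId(
  //               org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto
  //                   .newBuilder()
  //                   .setClusterTimestamp(System.currentTimeMillis())
  //                   .setId(1)
  //                   .build())
  //           .build();
  //   assert resp.hasApplicationId();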

  public interface GetApplicationReportRequestProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.GetApplicationReportRequestProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>optional .hadoop.yarn.ApplicationIdProto application_id = 1;</code>
     * @return Whether the applicationId field is set.
     */
    boolean hasApplicationId();
    /**
     * <code>optional .hadoop.yarn.ApplicationIdProto application_id = 1;</code>
     * @return The applicationId.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getApplicationId();
    /**
     * <code>optional .hadoop.yarn.ApplicationIdProto application_id = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getApplicationIdOrBuilder();
  }
  /**
   * Protobuf type {@code hadoop.yarn.GetApplicationReportRequestProto}
   */
  public static final class GetApplicationReportRequestProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.GetApplicationReportRequestProto)
      GetApplicationReportRequestProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use GetApplicationReportRequestProto.newBuilder() to construct.
    private GetApplicationReportRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private GetApplicationReportRequestProto() {
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new GetApplicationReportRequestProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetApplicationReportRequestProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetApplicationReportRequestProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportRequestProto.Builder.class);
    }

    private int bitField0_;
    public static final int APPLICATION_ID_FIELD_NUMBER = 1;
    private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto applicationId_;
    /**
     * <code>optional .hadoop.yarn.ApplicationIdProto application_id = 1;</code>
     * @return Whether the applicationId field is set.
     */
    @java.lang.Override
    public boolean hasApplicationId() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.ApplicationIdProto application_id = 1;</code>
     * @return The applicationId.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getApplicationId() {
      return applicationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_;
    }
    /**
     * <code>optional .hadoop.yarn.ApplicationIdProto application_id = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getApplicationIdOrBuilder() {
      return applicationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_;
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeMessage(1, getApplicationId());
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, getApplicationId());
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportRequestProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportRequestProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportRequestProto) obj;

      if (hasApplicationId() != other.hasApplicationId()) return false;
      if (hasApplicationId()) {
        if (!getApplicationId()
            .equals(other.getApplicationId())) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasApplicationId()) {
        hash = (37 * hash) + APPLICATION_ID_FIELD_NUMBER;
        hash = (53 * hash) + getApplicationId().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportRequestProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportRequestProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportRequestProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportRequestProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportRequestProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportRequestProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportRequestProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportRequestProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }
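
    // A sketch of the length-delimited stream round trip: writeDelimitedTo()
    // (inherited from the message base class) pairs with the
    // parseDelimitedFrom() overloads above. "request" and the byte-array
    // streams are hypothetical.
    //
    //   java.io.ByteArrayOutputStream out = new java.io.ByteArrayOutputStream();
    //   request.writeDelimitedTo(out);
    //   GetApplicationReportRequestProto copy =
    //       GetApplicationReportRequestProto.parseDelimitedFrom(
    //           new java.io.ByteArrayInputStream(out.toByteArray()));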

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportRequestProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.GetApplicationReportRequestProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.GetApplicationReportRequestProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportRequestProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetApplicationReportRequestProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetApplicationReportRequestProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportRequestProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportRequestProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
          getApplicationIdFieldBuilder();
        }
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        applicationId_ = null;
        if (applicationIdBuilder_ != null) {
          applicationIdBuilder_.dispose();
          applicationIdBuilder_ = null;
        }
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetApplicationReportRequestProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportRequestProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportRequestProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportRequestProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportRequestProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportRequestProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportRequestProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportRequestProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportRequestProto result) {
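        // Transfer the builder's has-bits to the built message; bit 0
        // (0x00000001) tracks whether application_id was set.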
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.applicationId_ = applicationIdBuilder_ == null
              ? applicationId_
              : applicationIdBuilder_.build();
          to_bitField0_ |= 0x00000001;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportRequestProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportRequestProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportRequestProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportRequestProto.getDefaultInstance()) return this;
        if (other.hasApplicationId()) {
          mergeApplicationId(other.getApplicationId());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
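              // tag 10 = (field 1 << 3) | wire type 2 (length-delimited):
              // the application_id sub-message on the wire; tag 0 means
              // end of input.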
              case 10: {
                input.readMessage(
                    getApplicationIdFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

      private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto applicationId_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder> applicationIdBuilder_;
      /**
       * <code>optional .hadoop.yarn.ApplicationIdProto application_id = 1;</code>
       * @return Whether the applicationId field is set.
       */
      public boolean hasApplicationId() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationIdProto application_id = 1;</code>
       * @return The applicationId.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getApplicationId() {
        if (applicationIdBuilder_ == null) {
          return applicationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_;
        } else {
          return applicationIdBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationIdProto application_id = 1;</code>
       */
      public Builder setApplicationId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto value) {
        if (applicationIdBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          applicationId_ = value;
        } else {
          applicationIdBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationIdProto application_id = 1;</code>
       */
      public Builder setApplicationId(
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder builderForValue) {
        if (applicationIdBuilder_ == null) {
          applicationId_ = builderForValue.build();
        } else {
          applicationIdBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationIdProto application_id = 1;</code>
       */
      public Builder mergeApplicationId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto value) {
        if (applicationIdBuilder_ == null) {
          if (((bitField0_ & 0x00000001) != 0) &&
            applicationId_ != null &&
            applicationId_ != org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance()) {
            getApplicationIdBuilder().mergeFrom(value);
          } else {
            applicationId_ = value;
          }
        } else {
          applicationIdBuilder_.mergeFrom(value);
        }
        if (applicationId_ != null) {
          bitField0_ |= 0x00000001;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationIdProto application_id = 1;</code>
       */
      public Builder clearApplicationId() {
        bitField0_ = (bitField0_ & ~0x00000001);
        applicationId_ = null;
        if (applicationIdBuilder_ != null) {
          applicationIdBuilder_.dispose();
          applicationIdBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationIdProto application_id = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder getApplicationIdBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getApplicationIdFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationIdProto application_id = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getApplicationIdOrBuilder() {
        if (applicationIdBuilder_ != null) {
          return applicationIdBuilder_.getMessageOrBuilder();
        } else {
          return applicationId_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationIdProto application_id = 1;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder> 
          getApplicationIdFieldBuilder() {
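        // Lazily create the nested builder on first access; once it exists it
        // owns the message, so the plain applicationId_ reference is dropped.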
        if (applicationIdBuilder_ == null) {
          applicationIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder>(
                  getApplicationId(),
                  getParentForChildren(),
                  isClean());
          applicationId_ = null;
        }
        return applicationIdBuilder_;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.GetApplicationReportRequestProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.GetApplicationReportRequestProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportRequestProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportRequestProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportRequestProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<GetApplicationReportRequestProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<GetApplicationReportRequestProto>() {
      @java.lang.Override
      public GetApplicationReportRequestProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
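
    // Direct use of the PARSER field is deprecated; parser() below (or the
    // static parseFrom overloads above) is the supported entry point.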

    public static org.apache.hadoop.thirdparty.protobuf.Parser<GetApplicationReportRequestProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<GetApplicationReportRequestProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportRequestProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
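
  // Illustrative usage (not part of the generated API surface): build,
  // serialize, and re-parse a report request. "appId" is a placeholder for an
  // ApplicationIdProto obtained elsewhere.
  //
  //   YarnProtos.ApplicationIdProto appId = ...;
  //   GetApplicationReportRequestProto request =
  //       GetApplicationReportRequestProto.newBuilder()
  //           .setApplicationId(appId)
  //           .build();
  //   byte[] wire = request.toByteArray();
  //   GetApplicationReportRequestProto roundTripped =
  //       GetApplicationReportRequestProto.parseFrom(wire);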

  public interface GetApplicationReportResponseProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.GetApplicationReportResponseProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>optional .hadoop.yarn.ApplicationReportProto application_report = 1;</code>
     * @return Whether the applicationReport field is set.
     */
    boolean hasApplicationReport();
    /**
     * <code>optional .hadoop.yarn.ApplicationReportProto application_report = 1;</code>
     * @return The applicationReport.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto getApplicationReport();
    /**
     * <code>optional .hadoop.yarn.ApplicationReportProto application_report = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProtoOrBuilder getApplicationReportOrBuilder();
  }
  /**
   * Protobuf type {@code hadoop.yarn.GetApplicationReportResponseProto}
   */
  public static final class GetApplicationReportResponseProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.GetApplicationReportResponseProto)
      GetApplicationReportResponseProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use GetApplicationReportResponseProto.newBuilder() to construct.
    private GetApplicationReportResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private GetApplicationReportResponseProto() {
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new GetApplicationReportResponseProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetApplicationReportResponseProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetApplicationReportResponseProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportResponseProto.Builder.class);
    }

    private int bitField0_;
    public static final int APPLICATION_REPORT_FIELD_NUMBER = 1;
    private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto applicationReport_;
    /**
     * <code>optional .hadoop.yarn.ApplicationReportProto application_report = 1;</code>
     * @return Whether the applicationReport field is set.
     */
    @java.lang.Override
    public boolean hasApplicationReport() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.ApplicationReportProto application_report = 1;</code>
     * @return The applicationReport.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto getApplicationReport() {
      return applicationReport_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto.getDefaultInstance() : applicationReport_;
    }
    /**
     * <code>optional .hadoop.yarn.ApplicationReportProto application_report = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProtoOrBuilder getApplicationReportOrBuilder() {
      return applicationReport_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto.getDefaultInstance() : applicationReport_;
    }

    private byte memoizedIsInitialized = -1;
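    // Sentinel values: -1 = not yet computed, 0 = known uninitialized,
    // 1 = known initialized.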
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      if (hasApplicationReport()) {
        if (!getApplicationReport().isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeMessage(1, getApplicationReport());
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, getApplicationReport());
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportResponseProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportResponseProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportResponseProto) obj;

      if (hasApplicationReport() != other.hasApplicationReport()) return false;
      if (hasApplicationReport()) {
        if (!getApplicationReport()
            .equals(other.getApplicationReport())) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasApplicationReport()) {
        hash = (37 * hash) + APPLICATION_REPORT_FIELD_NUMBER;
        hash = (53 * hash) + getApplicationReport().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportResponseProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportResponseProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportResponseProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportResponseProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportResponseProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportResponseProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportResponseProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportResponseProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportResponseProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.GetApplicationReportResponseProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.GetApplicationReportResponseProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportResponseProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetApplicationReportResponseProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetApplicationReportResponseProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportResponseProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportResponseProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
          getApplicationReportFieldBuilder();
        }
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        applicationReport_ = null;
        if (applicationReportBuilder_ != null) {
          applicationReportBuilder_.dispose();
          applicationReportBuilder_ = null;
        }
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetApplicationReportResponseProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportResponseProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportResponseProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportResponseProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportResponseProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportResponseProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportResponseProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportResponseProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportResponseProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.applicationReport_ = applicationReportBuilder_ == null
              ? applicationReport_
              : applicationReportBuilder_.build();
          to_bitField0_ |= 0x00000001;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportResponseProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportResponseProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportResponseProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportResponseProto.getDefaultInstance()) return this;
        if (other.hasApplicationReport()) {
          mergeApplicationReport(other.getApplicationReport());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        if (hasApplicationReport()) {
          if (!getApplicationReport().isInitialized()) {
            return false;
          }
        }
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                input.readMessage(
                    getApplicationReportFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

      private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto applicationReport_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProtoOrBuilder> applicationReportBuilder_;
      /**
       * <code>optional .hadoop.yarn.ApplicationReportProto application_report = 1;</code>
       * @return Whether the applicationReport field is set.
       */
      public boolean hasApplicationReport() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationReportProto application_report = 1;</code>
       * @return The applicationReport.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto getApplicationReport() {
        if (applicationReportBuilder_ == null) {
          return applicationReport_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto.getDefaultInstance() : applicationReport_;
        } else {
          return applicationReportBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationReportProto application_report = 1;</code>
       */
      public Builder setApplicationReport(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto value) {
        if (applicationReportBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          applicationReport_ = value;
        } else {
          applicationReportBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationReportProto application_report = 1;</code>
       */
      public Builder setApplicationReport(
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto.Builder builderForValue) {
        if (applicationReportBuilder_ == null) {
          applicationReport_ = builderForValue.build();
        } else {
          applicationReportBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationReportProto application_report = 1;</code>
       */
      public Builder mergeApplicationReport(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto value) {
        if (applicationReportBuilder_ == null) {
          if (((bitField0_ & 0x00000001) != 0) &&
            applicationReport_ != null &&
            applicationReport_ != org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto.getDefaultInstance()) {
            getApplicationReportBuilder().mergeFrom(value);
          } else {
            applicationReport_ = value;
          }
        } else {
          applicationReportBuilder_.mergeFrom(value);
        }
        if (applicationReport_ != null) {
          bitField0_ |= 0x00000001;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationReportProto application_report = 1;</code>
       */
      public Builder clearApplicationReport() {
        bitField0_ = (bitField0_ & ~0x00000001);
        applicationReport_ = null;
        if (applicationReportBuilder_ != null) {
          applicationReportBuilder_.dispose();
          applicationReportBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationReportProto application_report = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto.Builder getApplicationReportBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getApplicationReportFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationReportProto application_report = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProtoOrBuilder getApplicationReportOrBuilder() {
        if (applicationReportBuilder_ != null) {
          return applicationReportBuilder_.getMessageOrBuilder();
        } else {
          return applicationReport_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto.getDefaultInstance() : applicationReport_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationReportProto application_report = 1;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProtoOrBuilder> 
          getApplicationReportFieldBuilder() {
        if (applicationReportBuilder_ == null) {
          applicationReportBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProtoOrBuilder>(
                  getApplicationReport(),
                  getParentForChildren(),
                  isClean());
          applicationReport_ = null;
        }
        return applicationReportBuilder_;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.GetApplicationReportResponseProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.GetApplicationReportResponseProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportResponseProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportResponseProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportResponseProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<GetApplicationReportResponseProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<GetApplicationReportResponseProto>() {
      @java.lang.Override
      public GetApplicationReportResponseProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<GetApplicationReportResponseProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<GetApplicationReportResponseProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportResponseProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
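
  // Illustrative usage (not generated code): the application_report field is
  // optional, so callers should check presence first; when unset,
  // getApplicationReport() returns the default instance rather than null.
  //
  //   GetApplicationReportResponseProto response = ...;
  //   if (response.hasApplicationReport()) {
  //     YarnProtos.ApplicationReportProto report = response.getApplicationReport();
  //     // inspect report fields here
  //   }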

  public interface SubmitApplicationRequestProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.SubmitApplicationRequestProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>optional .hadoop.yarn.ApplicationSubmissionContextProto application_submission_context = 1;</code>
     * @return Whether the applicationSubmissionContext field is set.
     */
    boolean hasApplicationSubmissionContext();
    /**
     * <code>optional .hadoop.yarn.ApplicationSubmissionContextProto application_submission_context = 1;</code>
     * @return The applicationSubmissionContext.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto getApplicationSubmissionContext();
    /**
     * <code>optional .hadoop.yarn.ApplicationSubmissionContextProto application_submission_context = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProtoOrBuilder getApplicationSubmissionContextOrBuilder();
  }
  /**
   * Protobuf type {@code hadoop.yarn.SubmitApplicationRequestProto}
   */
  public static final class SubmitApplicationRequestProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.SubmitApplicationRequestProto)
      SubmitApplicationRequestProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use SubmitApplicationRequestProto.newBuilder() to construct.
    private SubmitApplicationRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private SubmitApplicationRequestProto() {
    }
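
    // Illustrative usage (not generated code): the request wraps a single
    // optional ApplicationSubmissionContextProto; "context" is assumed to be
    // built elsewhere via the generated setter.
    //
    //   SubmitApplicationRequestProto.newBuilder()
    //       .setApplicationSubmissionContext(context)
    //       .build();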

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new SubmitApplicationRequestProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_SubmitApplicationRequestProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_SubmitApplicationRequestProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationRequestProto.Builder.class);
    }

    private int bitField0_;
    public static final int APPLICATION_SUBMISSION_CONTEXT_FIELD_NUMBER = 1;
    private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto applicationSubmissionContext_;
    /**
     * <code>optional .hadoop.yarn.ApplicationSubmissionContextProto application_submission_context = 1;</code>
     * @return Whether the applicationSubmissionContext field is set.
     */
    @java.lang.Override
    public boolean hasApplicationSubmissionContext() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.ApplicationSubmissionContextProto application_submission_context = 1;</code>
     * @return The applicationSubmissionContext.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto getApplicationSubmissionContext() {
      return applicationSubmissionContext_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto.getDefaultInstance() : applicationSubmissionContext_;
    }
    /**
     * <code>optional .hadoop.yarn.ApplicationSubmissionContextProto application_submission_context = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProtoOrBuilder getApplicationSubmissionContextOrBuilder() {
      return applicationSubmissionContext_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto.getDefaultInstance() : applicationSubmissionContext_;
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      if (hasApplicationSubmissionContext()) {
        if (!getApplicationSubmissionContext().isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeMessage(1, getApplicationSubmissionContext());
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, getApplicationSubmissionContext());
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationRequestProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationRequestProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationRequestProto) obj;

      if (hasApplicationSubmissionContext() != other.hasApplicationSubmissionContext()) return false;
      if (hasApplicationSubmissionContext()) {
        if (!getApplicationSubmissionContext()
            .equals(other.getApplicationSubmissionContext())) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasApplicationSubmissionContext()) {
        hash = (37 * hash) + APPLICATION_SUBMISSION_CONTEXT_FIELD_NUMBER;
        hash = (53 * hash) + getApplicationSubmissionContext().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationRequestProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationRequestProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationRequestProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationRequestProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationRequestProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationRequestProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationRequestProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationRequestProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationRequestProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.SubmitApplicationRequestProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.SubmitApplicationRequestProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationRequestProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_SubmitApplicationRequestProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_SubmitApplicationRequestProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationRequestProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationRequestProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
          getApplicationSubmissionContextFieldBuilder();
        }
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        applicationSubmissionContext_ = null;
        if (applicationSubmissionContextBuilder_ != null) {
          applicationSubmissionContextBuilder_.dispose();
          applicationSubmissionContextBuilder_ = null;
        }
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_SubmitApplicationRequestProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationRequestProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationRequestProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationRequestProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationRequestProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationRequestProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationRequestProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationRequestProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationRequestProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.applicationSubmissionContext_ = applicationSubmissionContextBuilder_ == null
              ? applicationSubmissionContext_
              : applicationSubmissionContextBuilder_.build();
          to_bitField0_ |= 0x00000001;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationRequestProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationRequestProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationRequestProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationRequestProto.getDefaultInstance()) return this;
        if (other.hasApplicationSubmissionContext()) {
          mergeApplicationSubmissionContext(other.getApplicationSubmissionContext());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        if (hasApplicationSubmissionContext()) {
          if (!getApplicationSubmissionContext().isInitialized()) {
            return false;
          }
        }
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
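                // tag 10 = (field number 1 << 3) | wire type 2, i.e. a
                // length-delimited embedded message for field 1.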
                input.readMessage(
                    getApplicationSubmissionContextFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

      private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto applicationSubmissionContext_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProtoOrBuilder> applicationSubmissionContextBuilder_;
      /**
       * <code>optional .hadoop.yarn.ApplicationSubmissionContextProto application_submission_context = 1;</code>
       * @return Whether the applicationSubmissionContext field is set.
       */
      public boolean hasApplicationSubmissionContext() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationSubmissionContextProto application_submission_context = 1;</code>
       * @return The applicationSubmissionContext.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto getApplicationSubmissionContext() {
        if (applicationSubmissionContextBuilder_ == null) {
          return applicationSubmissionContext_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto.getDefaultInstance() : applicationSubmissionContext_;
        } else {
          return applicationSubmissionContextBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationSubmissionContextProto application_submission_context = 1;</code>
       */
      public Builder setApplicationSubmissionContext(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto value) {
        if (applicationSubmissionContextBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          applicationSubmissionContext_ = value;
        } else {
          applicationSubmissionContextBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationSubmissionContextProto application_submission_context = 1;</code>
       */
      public Builder setApplicationSubmissionContext(
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto.Builder builderForValue) {
        if (applicationSubmissionContextBuilder_ == null) {
          applicationSubmissionContext_ = builderForValue.build();
        } else {
          applicationSubmissionContextBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationSubmissionContextProto application_submission_context = 1;</code>
       *
       * <p>If the field is already set, {@code value} is merged into the
       * existing message (singular sub-fields set in {@code value} overwrite,
       * repeated sub-fields concatenate); otherwise the field is simply set
       * to {@code value}.
       */
      public Builder mergeApplicationSubmissionContext(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto value) {
        if (applicationSubmissionContextBuilder_ == null) {
          if (((bitField0_ & 0x00000001) != 0) &&
            applicationSubmissionContext_ != null &&
            applicationSubmissionContext_ != org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto.getDefaultInstance()) {
            getApplicationSubmissionContextBuilder().mergeFrom(value);
          } else {
            applicationSubmissionContext_ = value;
          }
        } else {
          applicationSubmissionContextBuilder_.mergeFrom(value);
        }
        if (applicationSubmissionContext_ != null) {
          bitField0_ |= 0x00000001;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationSubmissionContextProto application_submission_context = 1;</code>
       */
      public Builder clearApplicationSubmissionContext() {
        bitField0_ = (bitField0_ & ~0x00000001);
        applicationSubmissionContext_ = null;
        if (applicationSubmissionContextBuilder_ != null) {
          applicationSubmissionContextBuilder_.dispose();
          applicationSubmissionContextBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationSubmissionContextProto application_submission_context = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto.Builder getApplicationSubmissionContextBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getApplicationSubmissionContextFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationSubmissionContextProto application_submission_context = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProtoOrBuilder getApplicationSubmissionContextOrBuilder() {
        if (applicationSubmissionContextBuilder_ != null) {
          return applicationSubmissionContextBuilder_.getMessageOrBuilder();
        } else {
          return applicationSubmissionContext_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto.getDefaultInstance() : applicationSubmissionContext_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationSubmissionContextProto application_submission_context = 1;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProtoOrBuilder> 
          getApplicationSubmissionContextFieldBuilder() {
        if (applicationSubmissionContextBuilder_ == null) {
          applicationSubmissionContextBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProtoOrBuilder>(
                  getApplicationSubmissionContext(),
                  getParentForChildren(),
                  isClean());
          applicationSubmissionContext_ = null;
        }
        return applicationSubmissionContextBuilder_;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.SubmitApplicationRequestProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.SubmitApplicationRequestProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationRequestProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationRequestProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationRequestProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<SubmitApplicationRequestProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<SubmitApplicationRequestProto>() {
      @java.lang.Override
      public SubmitApplicationRequestProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
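
    // Note: PARSER is retained as a deprecated public field for backwards
    // source compatibility; callers should prefer parser() or
    // getParserForType().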

    public static org.apache.hadoop.thirdparty.protobuf.Parser<SubmitApplicationRequestProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<SubmitApplicationRequestProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationRequestProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
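
  // A minimal usage sketch, hand-written for illustration (protoc does not
  // emit it): build, serialize, and re-parse a SubmitApplicationRequestProto.
  // The empty ApplicationSubmissionContextProto is a placeholder; real
  // callers populate it before submitting.
  //
  //   org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto context =
  //       org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto.newBuilder().build();
  //   SubmitApplicationRequestProto request =
  //       SubmitApplicationRequestProto.newBuilder()
  //           .setApplicationSubmissionContext(context)
  //           .build();
  //   byte[] wire = request.toByteArray();
  //   SubmitApplicationRequestProto parsed =
  //       SubmitApplicationRequestProto.parseFrom(wire);
  //   assert parsed.hasApplicationSubmissionContext();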

  public interface SubmitApplicationResponseProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.SubmitApplicationResponseProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
  }
  /**
   * Protobuf type {@code hadoop.yarn.SubmitApplicationResponseProto}
   */
  public static final class SubmitApplicationResponseProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.SubmitApplicationResponseProto)
      SubmitApplicationResponseProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use SubmitApplicationResponseProto.newBuilder() to construct.
    private SubmitApplicationResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private SubmitApplicationResponseProto() {
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new SubmitApplicationResponseProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_SubmitApplicationResponseProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_SubmitApplicationResponseProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationResponseProto.Builder.class);
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationResponseProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationResponseProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationResponseProto) obj;

      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationResponseProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationResponseProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationResponseProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationResponseProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationResponseProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationResponseProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationResponseProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationResponseProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationResponseProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.SubmitApplicationResponseProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.SubmitApplicationResponseProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationResponseProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_SubmitApplicationResponseProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_SubmitApplicationResponseProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationResponseProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationResponseProto.newBuilder()
      private Builder() {
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_SubmitApplicationResponseProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationResponseProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationResponseProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationResponseProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationResponseProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationResponseProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationResponseProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationResponseProto(this);
        onBuilt();
        return result;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationResponseProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationResponseProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationResponseProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationResponseProto.getDefaultInstance()) return this;
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.SubmitApplicationResponseProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.SubmitApplicationResponseProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationResponseProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationResponseProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationResponseProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<SubmitApplicationResponseProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<SubmitApplicationResponseProto>() {
      @java.lang.Override
      public SubmitApplicationResponseProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<SubmitApplicationResponseProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<SubmitApplicationResponseProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationResponseProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
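
  // Usage sketch (illustrative, not generated): SubmitApplicationResponseProto
  // declares no fields, so a freshly built instance carries no payload and
  // serializes to zero bytes; parsing those bytes yields an equal instance.
  //
  //   SubmitApplicationResponseProto response =
  //       SubmitApplicationResponseProto.newBuilder().build();
  //   assert response.getSerializedSize() == 0;
  //   SubmitApplicationResponseProto parsed =
  //       SubmitApplicationResponseProto.parseFrom(response.toByteArray());
  //   assert parsed.equals(response);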

  public interface FailApplicationAttemptRequestProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.FailApplicationAttemptRequestProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;</code>
     * @return Whether the applicationAttemptId field is set.
     */
    boolean hasApplicationAttemptId();
    /**
     * <code>optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;</code>
     * @return The applicationAttemptId.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto getApplicationAttemptId();
    /**
     * <code>optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder getApplicationAttemptIdOrBuilder();
  }
  /**
   * Protobuf type {@code hadoop.yarn.FailApplicationAttemptRequestProto}
   */
  public static final class FailApplicationAttemptRequestProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.FailApplicationAttemptRequestProto)
      FailApplicationAttemptRequestProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use FailApplicationAttemptRequestProto.newBuilder() to construct.
    private FailApplicationAttemptRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private FailApplicationAttemptRequestProto() {
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new FailApplicationAttemptRequestProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_FailApplicationAttemptRequestProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_FailApplicationAttemptRequestProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptRequestProto.Builder.class);
    }

    private int bitField0_;
    public static final int APPLICATION_ATTEMPT_ID_FIELD_NUMBER = 1;
    private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto applicationAttemptId_;
    /**
     * <code>optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;</code>
     * @return Whether the applicationAttemptId field is set.
     */
    @java.lang.Override
    public boolean hasApplicationAttemptId() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;</code>
     * @return The applicationAttemptId.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto getApplicationAttemptId() {
      return applicationAttemptId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.getDefaultInstance() : applicationAttemptId_;
    }
    /**
     * <code>optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder getApplicationAttemptIdOrBuilder() {
      return applicationAttemptId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.getDefaultInstance() : applicationAttemptId_;
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeMessage(1, getApplicationAttemptId());
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, getApplicationAttemptId());
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptRequestProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptRequestProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptRequestProto) obj;

      if (hasApplicationAttemptId() != other.hasApplicationAttemptId()) return false;
      if (hasApplicationAttemptId()) {
        if (!getApplicationAttemptId()
            .equals(other.getApplicationAttemptId())) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasApplicationAttemptId()) {
        hash = (37 * hash) + APPLICATION_ATTEMPT_ID_FIELD_NUMBER;
        hash = (53 * hash) + getApplicationAttemptId().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptRequestProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptRequestProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptRequestProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptRequestProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptRequestProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptRequestProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptRequestProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptRequestProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptRequestProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.FailApplicationAttemptRequestProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.FailApplicationAttemptRequestProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptRequestProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_FailApplicationAttemptRequestProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_FailApplicationAttemptRequestProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptRequestProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptRequestProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
          getApplicationAttemptIdFieldBuilder();
        }
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        applicationAttemptId_ = null;
        if (applicationAttemptIdBuilder_ != null) {
          applicationAttemptIdBuilder_.dispose();
          applicationAttemptIdBuilder_ = null;
        }
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_FailApplicationAttemptRequestProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptRequestProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptRequestProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptRequestProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptRequestProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptRequestProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptRequestProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptRequestProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptRequestProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.applicationAttemptId_ = applicationAttemptIdBuilder_ == null
              ? applicationAttemptId_
              : applicationAttemptIdBuilder_.build();
          to_bitField0_ |= 0x00000001;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptRequestProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptRequestProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptRequestProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptRequestProto.getDefaultInstance()) return this;
        if (other.hasApplicationAttemptId()) {
          mergeApplicationAttemptId(other.getApplicationAttemptId());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
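                // tag 10 encodes field 1 with wire type 2 (length-delimited),
                // here the embedded ApplicationAttemptIdProto.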
                input.readMessage(
                    getApplicationAttemptIdFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

      private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto applicationAttemptId_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder> applicationAttemptIdBuilder_;
      /**
       * <code>optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;</code>
       * @return Whether the applicationAttemptId field is set.
       */
      public boolean hasApplicationAttemptId() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;</code>
       * @return The applicationAttemptId.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto getApplicationAttemptId() {
        if (applicationAttemptIdBuilder_ == null) {
          return applicationAttemptId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.getDefaultInstance() : applicationAttemptId_;
        } else {
          return applicationAttemptIdBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;</code>
       */
      public Builder setApplicationAttemptId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto value) {
        if (applicationAttemptIdBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          applicationAttemptId_ = value;
        } else {
          applicationAttemptIdBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;</code>
       */
      public Builder setApplicationAttemptId(
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder builderForValue) {
        if (applicationAttemptIdBuilder_ == null) {
          applicationAttemptId_ = builderForValue.build();
        } else {
          applicationAttemptIdBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;</code>
       *
       * <p>If the field is already set, {@code value} is merged into the
       * existing message (singular sub-fields set in {@code value} overwrite,
       * repeated sub-fields concatenate); otherwise the field is simply set
       * to {@code value}.
       */
      public Builder mergeApplicationAttemptId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto value) {
        if (applicationAttemptIdBuilder_ == null) {
          if (((bitField0_ & 0x00000001) != 0) &&
            applicationAttemptId_ != null &&
            applicationAttemptId_ != org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.getDefaultInstance()) {
            getApplicationAttemptIdBuilder().mergeFrom(value);
          } else {
            applicationAttemptId_ = value;
          }
        } else {
          applicationAttemptIdBuilder_.mergeFrom(value);
        }
        if (applicationAttemptId_ != null) {
          bitField0_ |= 0x00000001;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;</code>
       */
      public Builder clearApplicationAttemptId() {
        bitField0_ = (bitField0_ & ~0x00000001);
        applicationAttemptId_ = null;
        if (applicationAttemptIdBuilder_ != null) {
          applicationAttemptIdBuilder_.dispose();
          applicationAttemptIdBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder getApplicationAttemptIdBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getApplicationAttemptIdFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder getApplicationAttemptIdOrBuilder() {
        if (applicationAttemptIdBuilder_ != null) {
          return applicationAttemptIdBuilder_.getMessageOrBuilder();
        } else {
          return applicationAttemptId_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.getDefaultInstance() : applicationAttemptId_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder> 
          getApplicationAttemptIdFieldBuilder() {
        if (applicationAttemptIdBuilder_ == null) {
          applicationAttemptIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder>(
                  getApplicationAttemptId(),
                  getParentForChildren(),
                  isClean());
          applicationAttemptId_ = null;
        }
        return applicationAttemptIdBuilder_;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.FailApplicationAttemptRequestProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.FailApplicationAttemptRequestProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptRequestProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptRequestProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptRequestProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }
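
    // PARSER is deprecated in favor of parser(); its parsePartialFrom funnels every
    // failure into InvalidProtocolBufferException and attaches the partially built
    // message so callers can inspect what was decoded before the error.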

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<FailApplicationAttemptRequestProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<FailApplicationAttemptRequestProto>() {
      @java.lang.Override
      public FailApplicationAttemptRequestProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<FailApplicationAttemptRequestProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<FailApplicationAttemptRequestProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptRequestProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
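
  /*
   * Usage sketch (illustrative; not part of the generated output): round-tripping a
   * FailApplicationAttemptRequestProto. "attemptId" stands in for any populated
   * YarnProtos.ApplicationAttemptIdProto.
   *
   *   FailApplicationAttemptRequestProto request =
   *       FailApplicationAttemptRequestProto.newBuilder()
   *           .setApplicationAttemptId(attemptId)
   *           .build();
   *   byte[] wire = request.toByteArray();
   *   FailApplicationAttemptRequestProto parsed =
   *       FailApplicationAttemptRequestProto.parseFrom(wire);
   *   assert parsed.hasApplicationAttemptId();
   */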

  public interface FailApplicationAttemptResponseProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.FailApplicationAttemptResponseProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
  }
  /**
   * Protobuf type {@code hadoop.yarn.FailApplicationAttemptResponseProto}
   */
  public static final class FailApplicationAttemptResponseProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.FailApplicationAttemptResponseProto)
      FailApplicationAttemptResponseProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use FailApplicationAttemptResponseProto.newBuilder() to construct.
    private FailApplicationAttemptResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private FailApplicationAttemptResponseProto() {
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new FailApplicationAttemptResponseProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_FailApplicationAttemptResponseProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_FailApplicationAttemptResponseProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptResponseProto.Builder.class);
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptResponseProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptResponseProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptResponseProto) obj;

      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptResponseProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptResponseProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptResponseProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptResponseProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptResponseProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptResponseProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptResponseProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptResponseProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptResponseProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.FailApplicationAttemptResponseProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.FailApplicationAttemptResponseProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptResponseProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_FailApplicationAttemptResponseProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_FailApplicationAttemptResponseProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptResponseProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptResponseProto.newBuilder()
      private Builder() {

      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);

      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_FailApplicationAttemptResponseProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptResponseProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptResponseProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptResponseProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptResponseProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptResponseProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptResponseProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptResponseProto(this);
        onBuilt();
        return result;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptResponseProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptResponseProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptResponseProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptResponseProto.getDefaultInstance()) return this;
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.FailApplicationAttemptResponseProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.FailApplicationAttemptResponseProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptResponseProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptResponseProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptResponseProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<FailApplicationAttemptResponseProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<FailApplicationAttemptResponseProto>() {
      @java.lang.Override
      public FailApplicationAttemptResponseProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<FailApplicationAttemptResponseProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<FailApplicationAttemptResponseProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptResponseProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
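
  /*
   * Usage sketch (illustrative): FailApplicationAttemptResponseProto declares no fields,
   * so the shared default instance is the normal way to obtain one, and parsing an empty
   * payload yields an equal message.
   *
   *   FailApplicationAttemptResponseProto response =
   *       FailApplicationAttemptResponseProto.getDefaultInstance();
   *   FailApplicationAttemptResponseProto parsed =
   *       FailApplicationAttemptResponseProto.parseFrom(new byte[0]);
   *   assert response.equals(parsed);
   */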

  public interface KillApplicationRequestProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.KillApplicationRequestProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>optional .hadoop.yarn.ApplicationIdProto application_id = 1;</code>
     * @return Whether the applicationId field is set.
     */
    boolean hasApplicationId();
    /**
     * <code>optional .hadoop.yarn.ApplicationIdProto application_id = 1;</code>
     * @return The applicationId.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getApplicationId();
    /**
     * <code>optional .hadoop.yarn.ApplicationIdProto application_id = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getApplicationIdOrBuilder();

    /**
     * <code>optional string diagnostics = 2;</code>
     * @return Whether the diagnostics field is set.
     */
    boolean hasDiagnostics();
    /**
     * <code>optional string diagnostics = 2;</code>
     * @return The diagnostics.
     */
    java.lang.String getDiagnostics();
    /**
     * <code>optional string diagnostics = 2;</code>
     * @return The bytes for diagnostics.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getDiagnosticsBytes();
  }
  /**
   * Protobuf type {@code hadoop.yarn.KillApplicationRequestProto}
   */
  public static final class KillApplicationRequestProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.KillApplicationRequestProto)
      KillApplicationRequestProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use KillApplicationRequestProto.newBuilder() to construct.
    private KillApplicationRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private KillApplicationRequestProto() {
      diagnostics_ = "";
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new KillApplicationRequestProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_KillApplicationRequestProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_KillApplicationRequestProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationRequestProto.Builder.class);
    }

    private int bitField0_;
    public static final int APPLICATION_ID_FIELD_NUMBER = 1;
    private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto applicationId_;
    /**
     * <code>optional .hadoop.yarn.ApplicationIdProto application_id = 1;</code>
     * @return Whether the applicationId field is set.
     */
    @java.lang.Override
    public boolean hasApplicationId() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.ApplicationIdProto application_id = 1;</code>
     * @return The applicationId.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getApplicationId() {
      return applicationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_;
    }
    /**
     * <code>optional .hadoop.yarn.ApplicationIdProto application_id = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getApplicationIdOrBuilder() {
      return applicationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_;
    }

    public static final int DIAGNOSTICS_FIELD_NUMBER = 2;
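    // diagnostics_ stores either a String or a ByteString; each getter lazily converts to
    // and caches the other representation (the decoded String is cached only when the
    // bytes are valid UTF-8), so repeated reads avoid re-conversion.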
    @SuppressWarnings("serial")
    private volatile java.lang.Object diagnostics_ = "";
    /**
     * <code>optional string diagnostics = 2;</code>
     * @return Whether the diagnostics field is set.
     */
    @java.lang.Override
    public boolean hasDiagnostics() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <code>optional string diagnostics = 2;</code>
     * @return The diagnostics.
     */
    @java.lang.Override
    public java.lang.String getDiagnostics() {
      java.lang.Object ref = diagnostics_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          diagnostics_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string diagnostics = 2;</code>
     * @return The bytes for diagnostics.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getDiagnosticsBytes() {
      java.lang.Object ref = diagnostics_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        diagnostics_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeMessage(1, getApplicationId());
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 2, diagnostics_);
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, getApplicationId());
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(2, diagnostics_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationRequestProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationRequestProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationRequestProto) obj;

      if (hasApplicationId() != other.hasApplicationId()) return false;
      if (hasApplicationId()) {
        if (!getApplicationId()
            .equals(other.getApplicationId())) return false;
      }
      if (hasDiagnostics() != other.hasDiagnostics()) return false;
      if (hasDiagnostics()) {
        if (!getDiagnostics()
            .equals(other.getDiagnostics())) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasApplicationId()) {
        hash = (37 * hash) + APPLICATION_ID_FIELD_NUMBER;
        hash = (53 * hash) + getApplicationId().hashCode();
      }
      if (hasDiagnostics()) {
        hash = (37 * hash) + DIAGNOSTICS_FIELD_NUMBER;
        hash = (53 * hash) + getDiagnostics().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationRequestProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationRequestProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationRequestProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationRequestProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationRequestProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationRequestProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationRequestProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationRequestProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationRequestProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.KillApplicationRequestProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.KillApplicationRequestProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationRequestProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_KillApplicationRequestProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_KillApplicationRequestProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationRequestProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationRequestProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
          getApplicationIdFieldBuilder();
        }
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        applicationId_ = null;
        if (applicationIdBuilder_ != null) {
          applicationIdBuilder_.dispose();
          applicationIdBuilder_ = null;
        }
        diagnostics_ = "";
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_KillApplicationRequestProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationRequestProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationRequestProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationRequestProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationRequestProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationRequestProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationRequestProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationRequestProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

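      // Copies only the fields whose presence bit is set in the builder's bitField0_,
      // then ORs those bits into result.bitField0_ so has*() reports presence on the
      // built message.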
      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationRequestProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.applicationId_ = applicationIdBuilder_ == null
              ? applicationId_
              : applicationIdBuilder_.build();
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.diagnostics_ = diagnostics_;
          to_bitField0_ |= 0x00000002;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationRequestProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationRequestProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationRequestProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationRequestProto.getDefaultInstance()) return this;
        if (other.hasApplicationId()) {
          mergeApplicationId(other.getApplicationId());
        }
        if (other.hasDiagnostics()) {
          diagnostics_ = other.diagnostics_;
          bitField0_ |= 0x00000002;
          onChanged();
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

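      // Wire-format dispatch: tag 10 is field 1 (application_id, wire type 2) and tag 18
      // is field 2 (diagnostics, wire type 2); tag 0 means end of input, and unrecognized
      // tags are preserved through parseUnknownField.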
      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                input.readMessage(
                    getApplicationIdFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
              case 18: {
                diagnostics_ = input.readBytes();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

      private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto applicationId_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder> applicationIdBuilder_;
      /**
       * <code>optional .hadoop.yarn.ApplicationIdProto application_id = 1;</code>
       * @return Whether the applicationId field is set.
       */
      public boolean hasApplicationId() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationIdProto application_id = 1;</code>
       * @return The applicationId.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getApplicationId() {
        if (applicationIdBuilder_ == null) {
          return applicationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_;
        } else {
          return applicationIdBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationIdProto application_id = 1;</code>
       */
      public Builder setApplicationId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto value) {
        if (applicationIdBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          applicationId_ = value;
        } else {
          applicationIdBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationIdProto application_id = 1;</code>
       */
      public Builder setApplicationId(
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder builderForValue) {
        if (applicationIdBuilder_ == null) {
          applicationId_ = builderForValue.build();
        } else {
          applicationIdBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationIdProto application_id = 1;</code>
       */
      public Builder mergeApplicationId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto value) {
        if (applicationIdBuilder_ == null) {
          if (((bitField0_ & 0x00000001) != 0) &&
            applicationId_ != null &&
            applicationId_ != org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance()) {
            getApplicationIdBuilder().mergeFrom(value);
          } else {
            applicationId_ = value;
          }
        } else {
          applicationIdBuilder_.mergeFrom(value);
        }
        if (applicationId_ != null) {
          bitField0_ |= 0x00000001;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationIdProto application_id = 1;</code>
       */
      public Builder clearApplicationId() {
        bitField0_ = (bitField0_ & ~0x00000001);
        applicationId_ = null;
        if (applicationIdBuilder_ != null) {
          applicationIdBuilder_.dispose();
          applicationIdBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationIdProto application_id = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder getApplicationIdBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getApplicationIdFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationIdProto application_id = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getApplicationIdOrBuilder() {
        if (applicationIdBuilder_ != null) {
          return applicationIdBuilder_.getMessageOrBuilder();
        } else {
          return applicationId_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationIdProto application_id = 1;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder> 
          getApplicationIdFieldBuilder() {
        if (applicationIdBuilder_ == null) {
          applicationIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder>(
                  getApplicationId(),
                  getParentForChildren(),
                  isClean());
          applicationId_ = null;
        }
        return applicationIdBuilder_;
      }

      private java.lang.Object diagnostics_ = "";
      /**
       * <code>optional string diagnostics = 2;</code>
       * @return Whether the diagnostics field is set.
       */
      public boolean hasDiagnostics() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * <code>optional string diagnostics = 2;</code>
       * @return The diagnostics.
       */
      public java.lang.String getDiagnostics() {
        java.lang.Object ref = diagnostics_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            diagnostics_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string diagnostics = 2;</code>
       * @return The bytes for diagnostics.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getDiagnosticsBytes() {
        java.lang.Object ref = diagnostics_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          diagnostics_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string diagnostics = 2;</code>
       * @param value The diagnostics to set.
       * @return This builder for chaining.
       */
      public Builder setDiagnostics(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        diagnostics_ = value;
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>optional string diagnostics = 2;</code>
       * @return This builder for chaining.
       */
      public Builder clearDiagnostics() {
        diagnostics_ = getDefaultInstance().getDiagnostics();
        bitField0_ = (bitField0_ & ~0x00000002);
        onChanged();
        return this;
      }
      /**
       * <code>optional string diagnostics = 2;</code>
       * @param value The bytes for diagnostics to set.
       * @return This builder for chaining.
       */
      public Builder setDiagnosticsBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        diagnostics_ = value;
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.KillApplicationRequestProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.KillApplicationRequestProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationRequestProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationRequestProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationRequestProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<KillApplicationRequestProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<KillApplicationRequestProto>() {
      @java.lang.Override
      public KillApplicationRequestProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<KillApplicationRequestProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<KillApplicationRequestProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationRequestProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
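
  /*
   * Usage sketch (illustrative): building a KillApplicationRequestProto with both of its
   * optional fields. "appId" stands in for any populated YarnProtos.ApplicationIdProto.
   *
   *   KillApplicationRequestProto request =
   *       KillApplicationRequestProto.newBuilder()
   *           .setApplicationId(appId)
   *           .setDiagnostics("killed by operator")
   *           .build();
   *   assert request.hasApplicationId() && request.hasDiagnostics();
   */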

  public interface KillApplicationResponseProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.KillApplicationResponseProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>optional bool is_kill_completed = 1 [default = false];</code>
     * @return Whether the isKillCompleted field is set.
     */
    boolean hasIsKillCompleted();
    /**
     * <code>optional bool is_kill_completed = 1 [default = false];</code>
     * @return The isKillCompleted.
     */
    boolean getIsKillCompleted();
  }
  /**
   * Protobuf type {@code hadoop.yarn.KillApplicationResponseProto}
   */
  public static final class KillApplicationResponseProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.KillApplicationResponseProto)
      KillApplicationResponseProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use KillApplicationResponseProto.newBuilder() to construct.
    private KillApplicationResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private KillApplicationResponseProto() {
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new KillApplicationResponseProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_KillApplicationResponseProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_KillApplicationResponseProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationResponseProto.Builder.class);
    }

    private int bitField0_;
    public static final int IS_KILL_COMPLETED_FIELD_NUMBER = 1;
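    // Proto2 "optional bool ... [default = false]": hasIsKillCompleted() distinguishes an
    // explicit false from an unset field, which getIsKillCompleted() alone cannot.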
    private boolean isKillCompleted_ = false;
    /**
     * <code>optional bool is_kill_completed = 1 [default = false];</code>
     * @return Whether the isKillCompleted field is set.
     */
    @java.lang.Override
    public boolean hasIsKillCompleted() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional bool is_kill_completed = 1 [default = false];</code>
     * @return The isKillCompleted.
     */
    @java.lang.Override
    public boolean getIsKillCompleted() {
      return isKillCompleted_;
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeBool(1, isKillCompleted_);
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeBoolSize(1, isKillCompleted_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationResponseProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationResponseProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationResponseProto) obj;

      if (hasIsKillCompleted() != other.hasIsKillCompleted()) return false;
      if (hasIsKillCompleted()) {
        if (getIsKillCompleted()
            != other.getIsKillCompleted()) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
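      // memoizedHashCode == 0 means "not yet computed" (a real hash of 0 is
      // simply recomputed on each call).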
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasIsKillCompleted()) {
        hash = (37 * hash) + IS_KILL_COMPLETED_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashBoolean(
            getIsKillCompleted());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationResponseProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationResponseProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationResponseProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationResponseProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationResponseProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationResponseProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationResponseProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationResponseProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationResponseProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.KillApplicationResponseProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.KillApplicationResponseProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationResponseProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_KillApplicationResponseProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_KillApplicationResponseProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationResponseProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationResponseProto.newBuilder()
      private Builder() {
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        isKillCompleted_ = false;
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_KillApplicationResponseProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationResponseProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationResponseProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationResponseProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationResponseProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationResponseProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationResponseProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationResponseProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationResponseProto result) {
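        // Copies builder state into the immutable message and mirrors the
        // builder's presence bits into result.bitField0_.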
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.isKillCompleted_ = isKillCompleted_;
          to_bitField0_ |= 0x00000001;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationResponseProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationResponseProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationResponseProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationResponseProto.getDefaultInstance()) return this;
        if (other.hasIsKillCompleted()) {
          setIsKillCompleted(other.getIsKillCompleted());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
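              // tag = (field_number << 3) | wire_type; 8 = field 1, varint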
              case 8: {
                isKillCompleted_ = input.readBool();
                bitField0_ |= 0x00000001;
                break;
              } // case 8
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

      private boolean isKillCompleted_ ;
      /**
       * <code>optional bool is_kill_completed = 1 [default = false];</code>
       * @return Whether the isKillCompleted field is set.
       */
      @java.lang.Override
      public boolean hasIsKillCompleted() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>optional bool is_kill_completed = 1 [default = false];</code>
       * @return The isKillCompleted.
       */
      @java.lang.Override
      public boolean getIsKillCompleted() {
        return isKillCompleted_;
      }
      /**
       * <code>optional bool is_kill_completed = 1 [default = false];</code>
       * @param value The isKillCompleted to set.
       * @return This builder for chaining.
       */
      public Builder setIsKillCompleted(boolean value) {
        isKillCompleted_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional bool is_kill_completed = 1 [default = false];</code>
       * @return This builder for chaining.
       */
      public Builder clearIsKillCompleted() {
        bitField0_ = (bitField0_ & ~0x00000001);
        isKillCompleted_ = false;
        onChanged();
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.KillApplicationResponseProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.KillApplicationResponseProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationResponseProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationResponseProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationResponseProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<KillApplicationResponseProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<KillApplicationResponseProto>() {
      @java.lang.Override
      public KillApplicationResponseProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
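        // On failure, whatever was parsed so far is attached via
        // setUnfinishedMessage(builder.buildPartial()) for diagnostics.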
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<KillApplicationResponseProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<KillApplicationResponseProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationResponseProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }

  public interface GetClusterMetricsRequestProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.GetClusterMetricsRequestProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
  }
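
  /*
   * Usage sketch (not protoc output): this request message is intentionally
   * empty, presumably the argument-less request half of a getClusterMetrics
   * RPC, so callers can reuse the shared default instance instead of
   * building a new object:
   *
   *   GetClusterMetricsRequestProto req =
   *       GetClusterMetricsRequestProto.getDefaultInstance();
   */
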
  /**
   * Protobuf type {@code hadoop.yarn.GetClusterMetricsRequestProto}
   */
  public static final class GetClusterMetricsRequestProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.GetClusterMetricsRequestProto)
      GetClusterMetricsRequestProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use GetClusterMetricsRequestProto.newBuilder() to construct.
    private GetClusterMetricsRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private GetClusterMetricsRequestProto() {
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new GetClusterMetricsRequestProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetClusterMetricsRequestProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetClusterMetricsRequestProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsRequestProto.Builder.class);
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsRequestProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsRequestProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsRequestProto) obj;

      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsRequestProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsRequestProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsRequestProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsRequestProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsRequestProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsRequestProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsRequestProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsRequestProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsRequestProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.GetClusterMetricsRequestProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.GetClusterMetricsRequestProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsRequestProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetClusterMetricsRequestProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetClusterMetricsRequestProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsRequestProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsRequestProto.newBuilder()
      private Builder() {
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetClusterMetricsRequestProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsRequestProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsRequestProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsRequestProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsRequestProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsRequestProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsRequestProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsRequestProto(this);
        onBuilt();
        return result;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsRequestProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsRequestProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsRequestProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsRequestProto.getDefaultInstance()) return this;
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.GetClusterMetricsRequestProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.GetClusterMetricsRequestProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsRequestProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsRequestProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsRequestProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<GetClusterMetricsRequestProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<GetClusterMetricsRequestProto>() {
      @java.lang.Override
      public GetClusterMetricsRequestProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<GetClusterMetricsRequestProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<GetClusterMetricsRequestProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsRequestProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }

  public interface GetClusterMetricsResponseProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.GetClusterMetricsResponseProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>optional .hadoop.yarn.YarnClusterMetricsProto cluster_metrics = 1;</code>
     * @return Whether the clusterMetrics field is set.
     */
    boolean hasClusterMetrics();
    /**
     * <code>optional .hadoop.yarn.YarnClusterMetricsProto cluster_metrics = 1;</code>
     * @return The clusterMetrics.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto getClusterMetrics();
    /**
     * <code>optional .hadoop.yarn.YarnClusterMetricsProto cluster_metrics = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProtoOrBuilder getClusterMetricsOrBuilder();
  }
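
  /*
   * Usage sketch (not protoc output): singular message-typed field access,
   * assuming YarnClusterMetricsProto from the sibling generated YarnProtos
   * class referenced throughout this message:
   *
   *   GetClusterMetricsResponseProto resp =
   *       GetClusterMetricsResponseProto.newBuilder()
   *           .setClusterMetrics(
   *               org.apache.hadoop.yarn.proto.YarnProtos
   *                   .YarnClusterMetricsProto.getDefaultInstance())
   *           .build();
   *   if (resp.hasClusterMetrics()) {   // explicit presence, not nullness
   *     org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto m =
   *         resp.getClusterMetrics();
   *   }
   */
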
  /**
   * Protobuf type {@code hadoop.yarn.GetClusterMetricsResponseProto}
   */
  public static final class GetClusterMetricsResponseProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.GetClusterMetricsResponseProto)
      GetClusterMetricsResponseProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use GetClusterMetricsResponseProto.newBuilder() to construct.
    private GetClusterMetricsResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private GetClusterMetricsResponseProto() {
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new GetClusterMetricsResponseProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetClusterMetricsResponseProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetClusterMetricsResponseProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsResponseProto.Builder.class);
    }

    private int bitField0_;
    public static final int CLUSTER_METRICS_FIELD_NUMBER = 1;
    private org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto clusterMetrics_;
    /**
     * <code>optional .hadoop.yarn.YarnClusterMetricsProto cluster_metrics = 1;</code>
     * @return Whether the clusterMetrics field is set.
     */
    @java.lang.Override
    public boolean hasClusterMetrics() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.YarnClusterMetricsProto cluster_metrics = 1;</code>
     * @return The clusterMetrics.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto getClusterMetrics() {
      return clusterMetrics_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto.getDefaultInstance() : clusterMetrics_;
    }
    /**
     * <code>optional .hadoop.yarn.YarnClusterMetricsProto cluster_metrics = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProtoOrBuilder getClusterMetricsOrBuilder() {
      return clusterMetrics_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto.getDefaultInstance() : clusterMetrics_;
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeMessage(1, getClusterMetrics());
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, getClusterMetrics());
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsResponseProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsResponseProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsResponseProto) obj;

      if (hasClusterMetrics() != other.hasClusterMetrics()) return false;
      if (hasClusterMetrics()) {
        if (!getClusterMetrics()
            .equals(other.getClusterMetrics())) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasClusterMetrics()) {
        hash = (37 * hash) + CLUSTER_METRICS_FIELD_NUMBER;
        hash = (53 * hash) + getClusterMetrics().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsResponseProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsResponseProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsResponseProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsResponseProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsResponseProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsResponseProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsResponseProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsResponseProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsResponseProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.GetClusterMetricsResponseProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.GetClusterMetricsResponseProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsResponseProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetClusterMetricsResponseProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetClusterMetricsResponseProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsResponseProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsResponseProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
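        // alwaysUseFieldBuilders is normally false; protobuf-java enables it
        // in tests to force the nested-builder code paths to be exercised.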
        if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
          getClusterMetricsFieldBuilder();
        }
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        clusterMetrics_ = null;
        if (clusterMetricsBuilder_ != null) {
          clusterMetricsBuilder_.dispose();
          clusterMetricsBuilder_ = null;
        }
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetClusterMetricsResponseProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsResponseProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsResponseProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsResponseProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsResponseProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsResponseProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsResponseProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsResponseProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsResponseProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.clusterMetrics_ = clusterMetricsBuilder_ == null
              ? clusterMetrics_
              : clusterMetricsBuilder_.build();
          to_bitField0_ |= 0x00000001;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsResponseProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsResponseProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsResponseProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsResponseProto.getDefaultInstance()) return this;
        if (other.hasClusterMetrics()) {
          mergeClusterMetrics(other.getClusterMetrics());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
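              // tag 10 = (field 1 << 3) | wire type 2, a length-delimited message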
              case 10: {
                input.readMessage(
                    getClusterMetricsFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

      private org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto clusterMetrics_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto, org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProtoOrBuilder> clusterMetricsBuilder_;
      /**
       * <code>optional .hadoop.yarn.YarnClusterMetricsProto cluster_metrics = 1;</code>
       * @return Whether the clusterMetrics field is set.
       */
      public boolean hasClusterMetrics() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.YarnClusterMetricsProto cluster_metrics = 1;</code>
       * @return The clusterMetrics.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto getClusterMetrics() {
        if (clusterMetricsBuilder_ == null) {
          return clusterMetrics_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto.getDefaultInstance() : clusterMetrics_;
        } else {
          return clusterMetricsBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.YarnClusterMetricsProto cluster_metrics = 1;</code>
       */
      public Builder setClusterMetrics(org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto value) {
        if (clusterMetricsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          clusterMetrics_ = value;
        } else {
          clusterMetricsBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.YarnClusterMetricsProto cluster_metrics = 1;</code>
       */
      public Builder setClusterMetrics(
          org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto.Builder builderForValue) {
        if (clusterMetricsBuilder_ == null) {
          clusterMetrics_ = builderForValue.build();
        } else {
          clusterMetricsBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.YarnClusterMetricsProto cluster_metrics = 1;</code>
       */
      public Builder mergeClusterMetrics(org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto value) {
        if (clusterMetricsBuilder_ == null) {
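          // Merge semantics: if a non-default message is already present, the
          // incoming value is merged into it field-by-field; otherwise it
          // simply replaces the stored message.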
          if (((bitField0_ & 0x00000001) != 0) &&
            clusterMetrics_ != null &&
            clusterMetrics_ != org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto.getDefaultInstance()) {
            getClusterMetricsBuilder().mergeFrom(value);
          } else {
            clusterMetrics_ = value;
          }
        } else {
          clusterMetricsBuilder_.mergeFrom(value);
        }
        if (clusterMetrics_ != null) {
          bitField0_ |= 0x00000001;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.YarnClusterMetricsProto cluster_metrics = 1;</code>
       */
      public Builder clearClusterMetrics() {
        bitField0_ = (bitField0_ & ~0x00000001);
        clusterMetrics_ = null;
        if (clusterMetricsBuilder_ != null) {
          clusterMetricsBuilder_.dispose();
          clusterMetricsBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.YarnClusterMetricsProto cluster_metrics = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto.Builder getClusterMetricsBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getClusterMetricsFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.YarnClusterMetricsProto cluster_metrics = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProtoOrBuilder getClusterMetricsOrBuilder() {
        if (clusterMetricsBuilder_ != null) {
          return clusterMetricsBuilder_.getMessageOrBuilder();
        } else {
          return clusterMetrics_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto.getDefaultInstance() : clusterMetrics_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.YarnClusterMetricsProto cluster_metrics = 1;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto, org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProtoOrBuilder> 
          getClusterMetricsFieldBuilder() {
        if (clusterMetricsBuilder_ == null) {
          clusterMetricsBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto, org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProtoOrBuilder>(
                  getClusterMetrics(),
                  getParentForChildren(),
                  isClean());
          clusterMetrics_ = null;
        }
        return clusterMetricsBuilder_;
      }
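      /*
       * Note on the field-builder pattern above: the SingleFieldBuilderV3 is
       * created lazily on first access, seeded with the current message value,
       * and from then on owns the field; the plain clusterMetrics_ reference
       * is nulled out so there is a single source of truth.
       */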
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.GetClusterMetricsResponseProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.GetClusterMetricsResponseProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsResponseProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsResponseProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsResponseProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<GetClusterMetricsResponseProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<GetClusterMetricsResponseProto>() {
      @java.lang.Override
      public GetClusterMetricsResponseProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
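    /*
     * The parser above is deliberately lenient: parsePartialFrom() returns
     * builder.buildPartial(), so a message missing required fields is still
     * materialized, and any parse failure carries the partially built message
     * via setUnfinishedMessage() for diagnostics.
     */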

    public static org.apache.hadoop.thirdparty.protobuf.Parser<GetClusterMetricsResponseProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<GetClusterMetricsResponseProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsResponseProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
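  /*
   * Usage sketch for GetClusterMetricsResponseProto (illustrative; the
   * responseBytes variable is a stand-in for bytes received over the wire):
   *
   *   byte[] responseBytes = ...;
   *   YarnServiceProtos.GetClusterMetricsResponseProto resp =
   *       YarnServiceProtos.GetClusterMetricsResponseProto.parseFrom(responseBytes);
   *   if (resp.hasClusterMetrics()) {
   *     YarnProtos.YarnClusterMetricsProto metrics = resp.getClusterMetrics();
   *     // inspect metrics here
   *   }
   */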

  public interface MoveApplicationAcrossQueuesRequestProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.MoveApplicationAcrossQueuesRequestProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>required .hadoop.yarn.ApplicationIdProto application_id = 1;</code>
     * @return Whether the applicationId field is set.
     */
    boolean hasApplicationId();
    /**
     * <code>required .hadoop.yarn.ApplicationIdProto application_id = 1;</code>
     * @return The applicationId.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getApplicationId();
    /**
     * <code>required .hadoop.yarn.ApplicationIdProto application_id = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getApplicationIdOrBuilder();

    /**
     * <code>required string target_queue = 2;</code>
     * @return Whether the targetQueue field is set.
     */
    boolean hasTargetQueue();
    /**
     * <code>required string target_queue = 2;</code>
     * @return The targetQueue.
     */
    java.lang.String getTargetQueue();
    /**
     * <code>required string target_queue = 2;</code>
     * @return The bytes for targetQueue.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getTargetQueueBytes();
  }
  /**
   * Protobuf type {@code hadoop.yarn.MoveApplicationAcrossQueuesRequestProto}
   */
  public static final class MoveApplicationAcrossQueuesRequestProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.MoveApplicationAcrossQueuesRequestProto)
      MoveApplicationAcrossQueuesRequestProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use MoveApplicationAcrossQueuesRequestProto.newBuilder() to construct.
    private MoveApplicationAcrossQueuesRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private MoveApplicationAcrossQueuesRequestProto() {
      targetQueue_ = "";
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new MoveApplicationAcrossQueuesRequestProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_MoveApplicationAcrossQueuesRequestProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_MoveApplicationAcrossQueuesRequestProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesRequestProto.Builder.class);
    }

    private int bitField0_;
    public static final int APPLICATION_ID_FIELD_NUMBER = 1;
    private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto applicationId_;
    /**
     * <code>required .hadoop.yarn.ApplicationIdProto application_id = 1;</code>
     * @return Whether the applicationId field is set.
     */
    @java.lang.Override
    public boolean hasApplicationId() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>required .hadoop.yarn.ApplicationIdProto application_id = 1;</code>
     * @return The applicationId.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getApplicationId() {
      return applicationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_;
    }
    /**
     * <code>required .hadoop.yarn.ApplicationIdProto application_id = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getApplicationIdOrBuilder() {
      return applicationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_;
    }

    public static final int TARGET_QUEUE_FIELD_NUMBER = 2;
    @SuppressWarnings("serial")
    private volatile java.lang.Object targetQueue_ = "";
    /**
     * <code>required string target_queue = 2;</code>
     * @return Whether the targetQueue field is set.
     */
    @java.lang.Override
    public boolean hasTargetQueue() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <code>required string target_queue = 2;</code>
     * @return The targetQueue.
     */
    @java.lang.Override
    public java.lang.String getTargetQueue() {
      java.lang.Object ref = targetQueue_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          targetQueue_ = s;
        }
        return s;
      }
    }
    /**
     * <code>required string target_queue = 2;</code>
     * @return The bytes for targetQueue.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getTargetQueueBytes() {
      java.lang.Object ref = targetQueue_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        targetQueue_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }
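    /*
     * targetQueue_ intentionally holds either a String or a ByteString: the
     * wire form arrives as bytes, and the two accessors above cache the
     * converted representation back into the field (only when the bytes are
     * valid UTF-8, in the String case) so repeated reads avoid re-decoding.
     */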

    private byte memoizedIsInitialized = -1;
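    // Sentinel values: -1 = not yet computed, 0 = known uninitialized,
    // 1 = known initialized (all required fields present).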
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      if (!hasApplicationId()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasTargetQueue()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeMessage(1, getApplicationId());
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 2, targetQueue_);
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, getApplicationId());
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(2, targetQueue_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesRequestProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesRequestProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesRequestProto) obj;

      if (hasApplicationId() != other.hasApplicationId()) return false;
      if (hasApplicationId()) {
        if (!getApplicationId()
            .equals(other.getApplicationId())) return false;
      }
      if (hasTargetQueue() != other.hasTargetQueue()) return false;
      if (hasTargetQueue()) {
        if (!getTargetQueue()
            .equals(other.getTargetQueue())) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasApplicationId()) {
        hash = (37 * hash) + APPLICATION_ID_FIELD_NUMBER;
        hash = (53 * hash) + getApplicationId().hashCode();
      }
      if (hasTargetQueue()) {
        hash = (37 * hash) + TARGET_QUEUE_FIELD_NUMBER;
        hash = (53 * hash) + getTargetQueue().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesRequestProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesRequestProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesRequestProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesRequestProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesRequestProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesRequestProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesRequestProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesRequestProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesRequestProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.MoveApplicationAcrossQueuesRequestProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.MoveApplicationAcrossQueuesRequestProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesRequestProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_MoveApplicationAcrossQueuesRequestProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_MoveApplicationAcrossQueuesRequestProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesRequestProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesRequestProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
          getApplicationIdFieldBuilder();
        }
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        applicationId_ = null;
        if (applicationIdBuilder_ != null) {
          applicationIdBuilder_.dispose();
          applicationIdBuilder_ = null;
        }
        targetQueue_ = "";
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_MoveApplicationAcrossQueuesRequestProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesRequestProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesRequestProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesRequestProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesRequestProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesRequestProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesRequestProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesRequestProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesRequestProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.applicationId_ = applicationIdBuilder_ == null
              ? applicationId_
              : applicationIdBuilder_.build();
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.targetQueue_ = targetQueue_;
          to_bitField0_ |= 0x00000002;
        }
        result.bitField0_ |= to_bitField0_;
      }
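      /*
       * buildPartial0() above copies only the has-bits that are actually set
       * in the builder into the message's bitField0_, so fields never touched
       * on the builder stay unset on the built message.
       */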

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesRequestProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesRequestProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesRequestProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesRequestProto.getDefaultInstance()) return this;
        if (other.hasApplicationId()) {
          mergeApplicationId(other.getApplicationId());
        }
        if (other.hasTargetQueue()) {
          targetQueue_ = other.targetQueue_;
          bitField0_ |= 0x00000002;
          onChanged();
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        if (!hasApplicationId()) {
          return false;
        }
        if (!hasTargetQueue()) {
          return false;
        }
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                input.readMessage(
                    getApplicationIdFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
              case 18: {
                targetQueue_ = input.readBytes();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
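      /*
       * Wire-tag arithmetic for the switch above: tag = (field_number << 3)
       * | wire_type. Field 1 (application_id) with wire type 2
       * (length-delimited) gives tag 10; field 2 (target_queue), also
       * length-delimited, gives tag 18.
       */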
      private int bitField0_;

      private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto applicationId_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder> applicationIdBuilder_;
      /**
       * <code>required .hadoop.yarn.ApplicationIdProto application_id = 1;</code>
       * @return Whether the applicationId field is set.
       */
      public boolean hasApplicationId() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>required .hadoop.yarn.ApplicationIdProto application_id = 1;</code>
       * @return The applicationId.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getApplicationId() {
        if (applicationIdBuilder_ == null) {
          return applicationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_;
        } else {
          return applicationIdBuilder_.getMessage();
        }
      }
      /**
       * <code>required .hadoop.yarn.ApplicationIdProto application_id = 1;</code>
       */
      public Builder setApplicationId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto value) {
        if (applicationIdBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          applicationId_ = value;
        } else {
          applicationIdBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>required .hadoop.yarn.ApplicationIdProto application_id = 1;</code>
       */
      public Builder setApplicationId(
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder builderForValue) {
        if (applicationIdBuilder_ == null) {
          applicationId_ = builderForValue.build();
        } else {
          applicationIdBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>required .hadoop.yarn.ApplicationIdProto application_id = 1;</code>
       */
      public Builder mergeApplicationId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto value) {
        if (applicationIdBuilder_ == null) {
          if (((bitField0_ & 0x00000001) != 0) &&
            applicationId_ != null &&
            applicationId_ != org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance()) {
            getApplicationIdBuilder().mergeFrom(value);
          } else {
            applicationId_ = value;
          }
        } else {
          applicationIdBuilder_.mergeFrom(value);
        }
        if (applicationId_ != null) {
          bitField0_ |= 0x00000001;
          onChanged();
        }
        return this;
      }
      /**
       * <code>required .hadoop.yarn.ApplicationIdProto application_id = 1;</code>
       */
      public Builder clearApplicationId() {
        bitField0_ = (bitField0_ & ~0x00000001);
        applicationId_ = null;
        if (applicationIdBuilder_ != null) {
          applicationIdBuilder_.dispose();
          applicationIdBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>required .hadoop.yarn.ApplicationIdProto application_id = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder getApplicationIdBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getApplicationIdFieldBuilder().getBuilder();
      }
      /**
       * <code>required .hadoop.yarn.ApplicationIdProto application_id = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getApplicationIdOrBuilder() {
        if (applicationIdBuilder_ != null) {
          return applicationIdBuilder_.getMessageOrBuilder();
        } else {
          return applicationId_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_;
        }
      }
      /**
       * <code>required .hadoop.yarn.ApplicationIdProto application_id = 1;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder> 
          getApplicationIdFieldBuilder() {
        if (applicationIdBuilder_ == null) {
          applicationIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder>(
                  getApplicationId(),
                  getParentForChildren(),
                  isClean());
          applicationId_ = null;
        }
        return applicationIdBuilder_;
      }

      private java.lang.Object targetQueue_ = "";
      /**
       * <code>required string target_queue = 2;</code>
       * @return Whether the targetQueue field is set.
       */
      public boolean hasTargetQueue() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * <code>required string target_queue = 2;</code>
       * @return The targetQueue.
       */
      public java.lang.String getTargetQueue() {
        java.lang.Object ref = targetQueue_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            targetQueue_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>required string target_queue = 2;</code>
       * @return The bytes for targetQueue.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getTargetQueueBytes() {
        java.lang.Object ref = targetQueue_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          targetQueue_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>required string target_queue = 2;</code>
       * @param value The targetQueue to set.
       * @return This builder for chaining.
       */
      public Builder setTargetQueue(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        targetQueue_ = value;
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>required string target_queue = 2;</code>
       * @return This builder for chaining.
       */
      public Builder clearTargetQueue() {
        targetQueue_ = getDefaultInstance().getTargetQueue();
        bitField0_ = (bitField0_ & ~0x00000002);
        onChanged();
        return this;
      }
      /**
       * <code>required string target_queue = 2;</code>
       * @param value The bytes for targetQueue to set.
       * @return This builder for chaining.
       */
      public Builder setTargetQueueBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        targetQueue_ = value;
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.MoveApplicationAcrossQueuesRequestProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.MoveApplicationAcrossQueuesRequestProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesRequestProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesRequestProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesRequestProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<MoveApplicationAcrossQueuesRequestProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<MoveApplicationAcrossQueuesRequestProto>() {
      @java.lang.Override
      public MoveApplicationAcrossQueuesRequestProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<MoveApplicationAcrossQueuesRequestProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<MoveApplicationAcrossQueuesRequestProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesRequestProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
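  /*
   * Usage sketch for MoveApplicationAcrossQueuesRequestProto (illustrative;
   * appId stands in for an ApplicationIdProto obtained from the running
   * application, and "root.analytics" is a hypothetical queue name):
   *
   *   YarnProtos.ApplicationIdProto appId = ...;
   *   YarnServiceProtos.MoveApplicationAcrossQueuesRequestProto request =
   *       YarnServiceProtos.MoveApplicationAcrossQueuesRequestProto.newBuilder()
   *           .setApplicationId(appId)
   *           .setTargetQueue("root.analytics")
   *           .build();   // throws if either required field is missing
   *   byte[] wireBytes = request.toByteArray();
   */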

  public interface MoveApplicationAcrossQueuesResponseProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.MoveApplicationAcrossQueuesResponseProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
  }
  /**
   * Protobuf type {@code hadoop.yarn.MoveApplicationAcrossQueuesResponseProto}
   */
  public static final class MoveApplicationAcrossQueuesResponseProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.MoveApplicationAcrossQueuesResponseProto)
      MoveApplicationAcrossQueuesResponseProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use MoveApplicationAcrossQueuesResponseProto.newBuilder() to construct.
    private MoveApplicationAcrossQueuesResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private MoveApplicationAcrossQueuesResponseProto() {
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new MoveApplicationAcrossQueuesResponseProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_MoveApplicationAcrossQueuesResponseProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_MoveApplicationAcrossQueuesResponseProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesResponseProto.Builder.class);
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesResponseProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesResponseProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesResponseProto) obj;

      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesResponseProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesResponseProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesResponseProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesResponseProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesResponseProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesResponseProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesResponseProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesResponseProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesResponseProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.MoveApplicationAcrossQueuesResponseProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.MoveApplicationAcrossQueuesResponseProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesResponseProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_MoveApplicationAcrossQueuesResponseProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_MoveApplicationAcrossQueuesResponseProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesResponseProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesResponseProto.newBuilder()
      private Builder() {
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_MoveApplicationAcrossQueuesResponseProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesResponseProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesResponseProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesResponseProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesResponseProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesResponseProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesResponseProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesResponseProto(this);
        onBuilt();
        return result;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesResponseProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesResponseProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesResponseProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesResponseProto.getDefaultInstance()) return this;
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.MoveApplicationAcrossQueuesResponseProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.MoveApplicationAcrossQueuesResponseProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesResponseProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesResponseProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesResponseProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<MoveApplicationAcrossQueuesResponseProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<MoveApplicationAcrossQueuesResponseProto>() {
      @java.lang.Override
      public MoveApplicationAcrossQueuesResponseProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<MoveApplicationAcrossQueuesResponseProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<MoveApplicationAcrossQueuesResponseProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesResponseProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
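  /*
   * MoveApplicationAcrossQueuesResponseProto carries no fields; it serves as
   * a bare acknowledgement. Parsing is still well-defined (illustrative;
   * wireBytes is a stand-in for bytes received over the wire):
   *
   *   YarnServiceProtos.MoveApplicationAcrossQueuesResponseProto ack =
   *       YarnServiceProtos.MoveApplicationAcrossQueuesResponseProto
   *           .parseFrom(wireBytes);
   */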

  public interface GetApplicationsRequestProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.GetApplicationsRequestProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>repeated string application_types = 1;</code>
     * @return A list containing the applicationTypes.
     */
    java.util.List<java.lang.String>
        getApplicationTypesList();
    /**
     * <code>repeated string application_types = 1;</code>
     * @return The count of applicationTypes.
     */
    int getApplicationTypesCount();
    /**
     * <code>repeated string application_types = 1;</code>
     * @param index The index of the element to return.
     * @return The applicationTypes at the given index.
     */
    java.lang.String getApplicationTypes(int index);
    /**
     * <code>repeated string application_types = 1;</code>
     * @param index The index of the value to return.
     * @return The bytes of the applicationTypes at the given index.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getApplicationTypesBytes(int index);

    /**
     * <code>repeated .hadoop.yarn.YarnApplicationStateProto application_states = 2;</code>
     * @return A list containing the applicationStates.
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationStateProto> getApplicationStatesList();
    /**
     * <code>repeated .hadoop.yarn.YarnApplicationStateProto application_states = 2;</code>
     * @return The count of applicationStates.
     */
    int getApplicationStatesCount();
    /**
     * <code>repeated .hadoop.yarn.YarnApplicationStateProto application_states = 2;</code>
     * @param index The index of the element to return.
     * @return The applicationStates at the given index.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationStateProto getApplicationStates(int index);

    /**
     * <code>repeated string users = 3;</code>
     * @return A list containing the users.
     */
    java.util.List<java.lang.String>
        getUsersList();
    /**
     * <code>repeated string users = 3;</code>
     * @return The count of users.
     */
    int getUsersCount();
    /**
     * <code>repeated string users = 3;</code>
     * @param index The index of the element to return.
     * @return The users at the given index.
     */
    java.lang.String getUsers(int index);
    /**
     * <code>repeated string users = 3;</code>
     * @param index The index of the value to return.
     * @return The bytes of the users at the given index.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getUsersBytes(int index);

    /**
     * <code>repeated string queues = 4;</code>
     * @return A list containing the queues.
     */
    java.util.List<java.lang.String>
        getQueuesList();
    /**
     * <code>repeated string queues = 4;</code>
     * @return The count of queues.
     */
    int getQueuesCount();
    /**
     * <code>repeated string queues = 4;</code>
     * @param index The index of the element to return.
     * @return The queues at the given index.
     */
    java.lang.String getQueues(int index);
    /**
     * <code>repeated string queues = 4;</code>
     * @param index The index of the value to return.
     * @return The bytes of the queues at the given index.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getQueuesBytes(int index);

    /**
     * <code>optional int64 limit = 5;</code>
     * @return Whether the limit field is set.
     */
    boolean hasLimit();
    /**
     * <code>optional int64 limit = 5;</code>
     * @return The limit.
     */
    long getLimit();

    /**
     * <code>optional int64 start_begin = 6;</code>
     * @return Whether the startBegin field is set.
     */
    boolean hasStartBegin();
    /**
     * <code>optional int64 start_begin = 6;</code>
     * @return The startBegin.
     */
    long getStartBegin();

    /**
     * <code>optional int64 start_end = 7;</code>
     * @return Whether the startEnd field is set.
     */
    boolean hasStartEnd();
    /**
     * <code>optional int64 start_end = 7;</code>
     * @return The startEnd.
     */
    long getStartEnd();

    /**
     * <code>optional int64 finish_begin = 8;</code>
     * @return Whether the finishBegin field is set.
     */
    boolean hasFinishBegin();
    /**
     * <code>optional int64 finish_begin = 8;</code>
     * @return The finishBegin.
     */
    long getFinishBegin();

    /**
     * <code>optional int64 finish_end = 9;</code>
     * @return Whether the finishEnd field is set.
     */
    boolean hasFinishEnd();
    /**
     * <code>optional int64 finish_end = 9;</code>
     * @return The finishEnd.
     */
    long getFinishEnd();

    /**
     * <code>repeated string applicationTags = 10;</code>
     * @return A list containing the applicationTags.
     */
    java.util.List<java.lang.String>
        getApplicationTagsList();
    /**
     * <code>repeated string applicationTags = 10;</code>
     * @return The count of applicationTags.
     */
    int getApplicationTagsCount();
    /**
     * <code>repeated string applicationTags = 10;</code>
     * @param index The index of the element to return.
     * @return The applicationTags at the given index.
     */
    java.lang.String getApplicationTags(int index);
    /**
     * <code>repeated string applicationTags = 10;</code>
     * @param index The index of the value to return.
     * @return The bytes of the applicationTags at the given index.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getApplicationTagsBytes(int index);

    /**
     * <code>optional .hadoop.yarn.ApplicationsRequestScopeProto scope = 11 [default = ALL];</code>
     * @return Whether the scope field is set.
     */
    boolean hasScope();
    /**
     * <code>optional .hadoop.yarn.ApplicationsRequestScopeProto scope = 11 [default = ALL];</code>
     * @return The scope.
     */
    org.apache.hadoop.yarn.proto.YarnServiceProtos.ApplicationsRequestScopeProto getScope();

    /**
     * <code>optional string name = 12;</code>
     * @return Whether the name field is set.
     */
    boolean hasName();
    /**
     * <code>optional string name = 12;</code>
     * @return The name.
     */
    java.lang.String getName();
    /**
     * <code>optional string name = 12;</code>
     * @return The bytes for name.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getNameBytes();
  }
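
  // Illustrative note (not part of the generated output): the interface above
  // is implemented by both GetApplicationsRequestProto and its Builder, so
  // read-only helpers can accept either form without forcing a build().
  // A minimal sketch (limitOf is a hypothetical helper, not YARN API):
  //
  //   static long limitOf(GetApplicationsRequestProtoOrBuilder r) {
  //     return r.hasLimit() ? r.getLimit() : Long.MAX_VALUE;
  //   }
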
  /**
   * Protobuf type {@code hadoop.yarn.GetApplicationsRequestProto}
   */
  public static final class GetApplicationsRequestProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.GetApplicationsRequestProto)
      GetApplicationsRequestProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use GetApplicationsRequestProto.newBuilder() to construct.
    private GetApplicationsRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private GetApplicationsRequestProto() {
      applicationTypes_ =
          org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
      applicationStates_ = java.util.Collections.emptyList();
      users_ =
          org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
      queues_ =
          org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
      applicationTags_ =
          org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
      scope_ = 0;
      name_ = "";
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new GetApplicationsRequestProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetApplicationsRequestProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetApplicationsRequestProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsRequestProto.Builder.class);
    }

    private int bitField0_;
    public static final int APPLICATION_TYPES_FIELD_NUMBER = 1;
    @SuppressWarnings("serial")
    private org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList applicationTypes_ =
        org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
    /**
     * <code>repeated string application_types = 1;</code>
     * @return A list containing the applicationTypes.
     */
    public org.apache.hadoop.thirdparty.protobuf.ProtocolStringList
        getApplicationTypesList() {
      return applicationTypes_;
    }
    /**
     * <code>repeated string application_types = 1;</code>
     * @return The count of applicationTypes.
     */
    public int getApplicationTypesCount() {
      return applicationTypes_.size();
    }
    /**
     * <code>repeated string application_types = 1;</code>
     * @param index The index of the element to return.
     * @return The applicationTypes at the given index.
     */
    public java.lang.String getApplicationTypes(int index) {
      return applicationTypes_.get(index);
    }
    /**
     * <code>repeated string application_types = 1;</code>
     * @param index The index of the value to return.
     * @return The bytes of the applicationTypes at the given index.
     */
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getApplicationTypesBytes(int index) {
      return applicationTypes_.getByteString(index);
    }

    public static final int APPLICATION_STATES_FIELD_NUMBER = 2;
    @SuppressWarnings("serial")
    private java.util.List<java.lang.Integer> applicationStates_;
    private static final org.apache.hadoop.thirdparty.protobuf.Internal.ListAdapter.Converter<
        java.lang.Integer, org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationStateProto> applicationStates_converter_ =
            new org.apache.hadoop.thirdparty.protobuf.Internal.ListAdapter.Converter<
                java.lang.Integer, org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationStateProto>() {
              public org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationStateProto convert(java.lang.Integer from) {
                org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationStateProto result = org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationStateProto.forNumber(from);
                return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationStateProto.NEW : result;
              }
            };
    /**
     * <code>repeated .hadoop.yarn.YarnApplicationStateProto application_states = 2;</code>
     * @return A list containing the applicationStates.
     */
    @java.lang.Override
    public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationStateProto> getApplicationStatesList() {
      return new org.apache.hadoop.thirdparty.protobuf.Internal.ListAdapter<
          java.lang.Integer, org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationStateProto>(applicationStates_, applicationStates_converter_);
    }
    /**
     * <code>repeated .hadoop.yarn.YarnApplicationStateProto application_states = 2;</code>
     * @return The count of applicationStates.
     */
    @java.lang.Override
    public int getApplicationStatesCount() {
      return applicationStates_.size();
    }
    /**
     * <code>repeated .hadoop.yarn.YarnApplicationStateProto application_states = 2;</code>
     * @param index The index of the element to return.
     * @return The applicationStates at the given index.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationStateProto getApplicationStates(int index) {
      return applicationStates_converter_.convert(applicationStates_.get(index));
    }
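
    // Illustrative note (not part of the generated output): the repeated enum
    // field is stored as raw java.lang.Integer wire numbers and exposed through
    // the ListAdapter above. The converter's fallback to NEW is defensive only;
    // on the normal parse path, numbers unknown to this binary are diverted to
    // the unknown-field set (see cases 16 and 18 in Builder.mergeFrom) and
    // never reach this list.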

    public static final int USERS_FIELD_NUMBER = 3;
    @SuppressWarnings("serial")
    private org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList users_ =
        org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
    /**
     * <code>repeated string users = 3;</code>
     * @return A list containing the users.
     */
    public org.apache.hadoop.thirdparty.protobuf.ProtocolStringList
        getUsersList() {
      return users_;
    }
    /**
     * <code>repeated string users = 3;</code>
     * @return The count of users.
     */
    public int getUsersCount() {
      return users_.size();
    }
    /**
     * <code>repeated string users = 3;</code>
     * @param index The index of the element to return.
     * @return The users at the given index.
     */
    public java.lang.String getUsers(int index) {
      return users_.get(index);
    }
    /**
     * <code>repeated string users = 3;</code>
     * @param index The index of the value to return.
     * @return The bytes of the users at the given index.
     */
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getUsersBytes(int index) {
      return users_.getByteString(index);
    }

    public static final int QUEUES_FIELD_NUMBER = 4;
    @SuppressWarnings("serial")
    private org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList queues_ =
        org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
    /**
     * <code>repeated string queues = 4;</code>
     * @return A list containing the queues.
     */
    public org.apache.hadoop.thirdparty.protobuf.ProtocolStringList
        getQueuesList() {
      return queues_;
    }
    /**
     * <code>repeated string queues = 4;</code>
     * @return The count of queues.
     */
    public int getQueuesCount() {
      return queues_.size();
    }
    /**
     * <code>repeated string queues = 4;</code>
     * @param index The index of the element to return.
     * @return The queues at the given index.
     */
    public java.lang.String getQueues(int index) {
      return queues_.get(index);
    }
    /**
     * <code>repeated string queues = 4;</code>
     * @param index The index of the value to return.
     * @return The bytes of the queues at the given index.
     */
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getQueuesBytes(int index) {
      return queues_.getByteString(index);
    }

    public static final int LIMIT_FIELD_NUMBER = 5;
    private long limit_ = 0L;
    /**
     * <code>optional int64 limit = 5;</code>
     * @return Whether the limit field is set.
     */
    @java.lang.Override
    public boolean hasLimit() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional int64 limit = 5;</code>
     * @return The limit.
     */
    @java.lang.Override
    public long getLimit() {
      return limit_;
    }

    public static final int START_BEGIN_FIELD_NUMBER = 6;
    private long startBegin_ = 0L;
    /**
     * <code>optional int64 start_begin = 6;</code>
     * @return Whether the startBegin field is set.
     */
    @java.lang.Override
    public boolean hasStartBegin() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <code>optional int64 start_begin = 6;</code>
     * @return The startBegin.
     */
    @java.lang.Override
    public long getStartBegin() {
      return startBegin_;
    }

    public static final int START_END_FIELD_NUMBER = 7;
    private long startEnd_ = 0L;
    /**
     * <code>optional int64 start_end = 7;</code>
     * @return Whether the startEnd field is set.
     */
    @java.lang.Override
    public boolean hasStartEnd() {
      return ((bitField0_ & 0x00000004) != 0);
    }
    /**
     * <code>optional int64 start_end = 7;</code>
     * @return The startEnd.
     */
    @java.lang.Override
    public long getStartEnd() {
      return startEnd_;
    }

    public static final int FINISH_BEGIN_FIELD_NUMBER = 8;
    private long finishBegin_ = 0L;
    /**
     * <code>optional int64 finish_begin = 8;</code>
     * @return Whether the finishBegin field is set.
     */
    @java.lang.Override
    public boolean hasFinishBegin() {
      return ((bitField0_ & 0x00000008) != 0);
    }
    /**
     * <code>optional int64 finish_begin = 8;</code>
     * @return The finishBegin.
     */
    @java.lang.Override
    public long getFinishBegin() {
      return finishBegin_;
    }

    public static final int FINISH_END_FIELD_NUMBER = 9;
    private long finishEnd_ = 0L;
    /**
     * <code>optional int64 finish_end = 9;</code>
     * @return Whether the finishEnd field is set.
     */
    @java.lang.Override
    public boolean hasFinishEnd() {
      return ((bitField0_ & 0x00000010) != 0);
    }
    /**
     * <code>optional int64 finish_end = 9;</code>
     * @return The finishEnd.
     */
    @java.lang.Override
    public long getFinishEnd() {
      return finishEnd_;
    }

    public static final int APPLICATIONTAGS_FIELD_NUMBER = 10;
    @SuppressWarnings("serial")
    private org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList applicationTags_ =
        org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
    /**
     * <code>repeated string applicationTags = 10;</code>
     * @return A list containing the applicationTags.
     */
    public org.apache.hadoop.thirdparty.protobuf.ProtocolStringList
        getApplicationTagsList() {
      return applicationTags_;
    }
    /**
     * <code>repeated string applicationTags = 10;</code>
     * @return The count of applicationTags.
     */
    public int getApplicationTagsCount() {
      return applicationTags_.size();
    }
    /**
     * <code>repeated string applicationTags = 10;</code>
     * @param index The index of the element to return.
     * @return The applicationTags at the given index.
     */
    public java.lang.String getApplicationTags(int index) {
      return applicationTags_.get(index);
    }
    /**
     * <code>repeated string applicationTags = 10;</code>
     * @param index The index of the value to return.
     * @return The bytes of the applicationTags at the given index.
     */
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getApplicationTagsBytes(int index) {
      return applicationTags_.getByteString(index);
    }

    public static final int SCOPE_FIELD_NUMBER = 11;
    private int scope_ = 0;
    /**
     * <code>optional .hadoop.yarn.ApplicationsRequestScopeProto scope = 11 [default = ALL];</code>
     * @return Whether the scope field is set.
     */
    @java.lang.Override public boolean hasScope() {
      return ((bitField0_ & 0x00000020) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.ApplicationsRequestScopeProto scope = 11 [default = ALL];</code>
     * @return The scope.
     */
    @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.ApplicationsRequestScopeProto getScope() {
      org.apache.hadoop.yarn.proto.YarnServiceProtos.ApplicationsRequestScopeProto result = org.apache.hadoop.yarn.proto.YarnServiceProtos.ApplicationsRequestScopeProto.forNumber(scope_);
      return result == null ? org.apache.hadoop.yarn.proto.YarnServiceProtos.ApplicationsRequestScopeProto.ALL : result;
    }
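
    // Illustrative note (not part of the generated output): scope_ keeps the
    // raw wire number. getScope() maps it through forNumber() and substitutes
    // ApplicationsRequestScopeProto.ALL (the declared proto default) both when
    // the field is unset and when the stored number is unrecognized.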

    public static final int NAME_FIELD_NUMBER = 12;
    @SuppressWarnings("serial")
    private volatile java.lang.Object name_ = "";
    /**
     * <code>optional string name = 12;</code>
     * @return Whether the name field is set.
     */
    @java.lang.Override
    public boolean hasName() {
      return ((bitField0_ & 0x00000040) != 0);
    }
    /**
     * <code>optional string name = 12;</code>
     * @return The name.
     */
    @java.lang.Override
    public java.lang.String getName() {
      java.lang.Object ref = name_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          name_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string name = 12;</code>
     * @return The bytes for name.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getNameBytes() {
      java.lang.Object ref = name_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        name_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }
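
    // Illustrative note (not part of the generated output): name_ holds either
    // a String or a ByteString. The parser stores the raw ByteString from the
    // wire; the first getName() call decodes it as UTF-8 and, when valid,
    // caches the decoded String back into name_. getNameBytes() caches in the
    // opposite direction, so repeated reads in either representation pay the
    // conversion cost at most once.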

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      for (int i = 0; i < applicationTypes_.size(); i++) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, applicationTypes_.getRaw(i));
      }
      for (int i = 0; i < applicationStates_.size(); i++) {
        output.writeEnum(2, applicationStates_.get(i));
      }
      for (int i = 0; i < users_.size(); i++) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 3, users_.getRaw(i));
      }
      for (int i = 0; i < queues_.size(); i++) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 4, queues_.getRaw(i));
      }
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeInt64(5, limit_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        output.writeInt64(6, startBegin_);
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        output.writeInt64(7, startEnd_);
      }
      if (((bitField0_ & 0x00000008) != 0)) {
        output.writeInt64(8, finishBegin_);
      }
      if (((bitField0_ & 0x00000010) != 0)) {
        output.writeInt64(9, finishEnd_);
      }
      for (int i = 0; i < applicationTags_.size(); i++) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 10, applicationTags_.getRaw(i));
      }
      if (((bitField0_ & 0x00000020) != 0)) {
        output.writeEnum(11, scope_);
      }
      if (((bitField0_ & 0x00000040) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 12, name_);
      }
      getUnknownFields().writeTo(output);
    }
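
    // Worked example (illustrative, not part of the generated output): every
    // write above encodes the protobuf tag as (field_number << 3) | wire_type.
    // For application_types (field 1, length-delimited type 2) that gives
    // (1 << 3) | 2 = 10; for limit (field 5, varint type 0) it gives
    // (5 << 3) | 0 = 40. These are exactly the case labels that
    // Builder.mergeFrom() switches on further down.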

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      {
        int dataSize = 0;
        for (int i = 0; i < applicationTypes_.size(); i++) {
          dataSize += computeStringSizeNoTag(applicationTypes_.getRaw(i));
        }
        size += dataSize;
        size += 1 * getApplicationTypesList().size();
      }
      {
        int dataSize = 0;
        for (int i = 0; i < applicationStates_.size(); i++) {
          dataSize += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
            .computeEnumSizeNoTag(applicationStates_.get(i));
        }
        size += dataSize;
        size += 1 * applicationStates_.size();
      }
      {
        int dataSize = 0;
        for (int i = 0; i < users_.size(); i++) {
          dataSize += computeStringSizeNoTag(users_.getRaw(i));
        }
        size += dataSize;
        size += 1 * getUsersList().size();
      }
      {
        int dataSize = 0;
        for (int i = 0; i < queues_.size(); i++) {
          dataSize += computeStringSizeNoTag(queues_.getRaw(i));
        }
        size += dataSize;
        size += 1 * getQueuesList().size();
      }
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt64Size(5, limit_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt64Size(6, startBegin_);
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt64Size(7, startEnd_);
      }
      if (((bitField0_ & 0x00000008) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt64Size(8, finishBegin_);
      }
      if (((bitField0_ & 0x00000010) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt64Size(9, finishEnd_);
      }
      {
        int dataSize = 0;
        for (int i = 0; i < applicationTags_.size(); i++) {
          dataSize += computeStringSizeNoTag(applicationTags_.getRaw(i));
        }
        size += dataSize;
        size += 1 * getApplicationTagsList().size();
      }
      if (((bitField0_ & 0x00000020) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeEnumSize(11, scope_);
      }
      if (((bitField0_ & 0x00000040) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(12, name_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }
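
    // Illustrative note (not part of the generated output): memoizedSize
    // caches the computed size after the first call, which is safe because the
    // message is immutable. The "1 * list.size()" terms charge one tag byte per
    // element; field numbers 1 through 15 always encode to a single-byte tag.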

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsRequestProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsRequestProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsRequestProto) obj;

      if (!getApplicationTypesList()
          .equals(other.getApplicationTypesList())) return false;
      if (!applicationStates_.equals(other.applicationStates_)) return false;
      if (!getUsersList()
          .equals(other.getUsersList())) return false;
      if (!getQueuesList()
          .equals(other.getQueuesList())) return false;
      if (hasLimit() != other.hasLimit()) return false;
      if (hasLimit()) {
        if (getLimit()
            != other.getLimit()) return false;
      }
      if (hasStartBegin() != other.hasStartBegin()) return false;
      if (hasStartBegin()) {
        if (getStartBegin()
            != other.getStartBegin()) return false;
      }
      if (hasStartEnd() != other.hasStartEnd()) return false;
      if (hasStartEnd()) {
        if (getStartEnd()
            != other.getStartEnd()) return false;
      }
      if (hasFinishBegin() != other.hasFinishBegin()) return false;
      if (hasFinishBegin()) {
        if (getFinishBegin()
            != other.getFinishBegin()) return false;
      }
      if (hasFinishEnd() != other.hasFinishEnd()) return false;
      if (hasFinishEnd()) {
        if (getFinishEnd()
            != other.getFinishEnd()) return false;
      }
      if (!getApplicationTagsList()
          .equals(other.getApplicationTagsList())) return false;
      if (hasScope() != other.hasScope()) return false;
      if (hasScope()) {
        if (scope_ != other.scope_) return false;
      }
      if (hasName() != other.hasName()) return false;
      if (hasName()) {
        if (!getName()
            .equals(other.getName())) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (getApplicationTypesCount() > 0) {
        hash = (37 * hash) + APPLICATION_TYPES_FIELD_NUMBER;
        hash = (53 * hash) + getApplicationTypesList().hashCode();
      }
      if (getApplicationStatesCount() > 0) {
        hash = (37 * hash) + APPLICATION_STATES_FIELD_NUMBER;
        hash = (53 * hash) + applicationStates_.hashCode();
      }
      if (getUsersCount() > 0) {
        hash = (37 * hash) + USERS_FIELD_NUMBER;
        hash = (53 * hash) + getUsersList().hashCode();
      }
      if (getQueuesCount() > 0) {
        hash = (37 * hash) + QUEUES_FIELD_NUMBER;
        hash = (53 * hash) + getQueuesList().hashCode();
      }
      if (hasLimit()) {
        hash = (37 * hash) + LIMIT_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
            getLimit());
      }
      if (hasStartBegin()) {
        hash = (37 * hash) + START_BEGIN_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
            getStartBegin());
      }
      if (hasStartEnd()) {
        hash = (37 * hash) + START_END_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
            getStartEnd());
      }
      if (hasFinishBegin()) {
        hash = (37 * hash) + FINISH_BEGIN_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
            getFinishBegin());
      }
      if (hasFinishEnd()) {
        hash = (37 * hash) + FINISH_END_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
            getFinishEnd());
      }
      if (getApplicationTagsCount() > 0) {
        hash = (37 * hash) + APPLICATIONTAGS_FIELD_NUMBER;
        hash = (53 * hash) + getApplicationTagsList().hashCode();
      }
      if (hasScope()) {
        hash = (37 * hash) + SCOPE_FIELD_NUMBER;
        hash = (53 * hash) + scope_;
      }
      if (hasName()) {
        hash = (37 * hash) + NAME_FIELD_NUMBER;
        hash = (53 * hash) + getName().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsRequestProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsRequestProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsRequestProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsRequestProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsRequestProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsRequestProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsRequestProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsRequestProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsRequestProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }
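
    // Illustrative usage sketch (not emitted by protoc); the queue name and
    // limit below are arbitrary example values:
    //
    //   GetApplicationsRequestProto request =
    //       GetApplicationsRequestProto.newBuilder()
    //           .addQueues("default")
    //           .addApplicationStates(
    //               org.apache.hadoop.yarn.proto.YarnProtos
    //                   .YarnApplicationStateProto.RUNNING)
    //           .setLimit(100L)
    //           .build();
    //
    //   // toBuilder() round-trips: edit a copy without touching the original.
    //   GetApplicationsRequestProto unlimited =
    //       request.toBuilder().clearLimit().build();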

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.GetApplicationsRequestProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.GetApplicationsRequestProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsRequestProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetApplicationsRequestProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetApplicationsRequestProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsRequestProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsRequestProto.newBuilder()
      private Builder() {
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        applicationTypes_ =
            org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
        applicationStates_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000002);
        users_ =
            org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
        queues_ =
            org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
        limit_ = 0L;
        startBegin_ = 0L;
        startEnd_ = 0L;
        finishBegin_ = 0L;
        finishEnd_ = 0L;
        applicationTags_ =
            org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
        scope_ = 0;
        name_ = "";
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetApplicationsRequestProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsRequestProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsRequestProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsRequestProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsRequestProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsRequestProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsRequestProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsRequestProto(this);
        buildPartialRepeatedFields(result);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartialRepeatedFields(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsRequestProto result) {
        if (((bitField0_ & 0x00000002) != 0)) {
          applicationStates_ = java.util.Collections.unmodifiableList(applicationStates_);
          bitField0_ = (bitField0_ & ~0x00000002);
        }
        result.applicationStates_ = applicationStates_;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsRequestProto result) {
        int from_bitField0_ = bitField0_;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          applicationTypes_.makeImmutable();
          result.applicationTypes_ = applicationTypes_;
        }
        if (((from_bitField0_ & 0x00000004) != 0)) {
          users_.makeImmutable();
          result.users_ = users_;
        }
        if (((from_bitField0_ & 0x00000008) != 0)) {
          queues_.makeImmutable();
          result.queues_ = queues_;
        }
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000010) != 0)) {
          result.limit_ = limit_;
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000020) != 0)) {
          result.startBegin_ = startBegin_;
          to_bitField0_ |= 0x00000002;
        }
        if (((from_bitField0_ & 0x00000040) != 0)) {
          result.startEnd_ = startEnd_;
          to_bitField0_ |= 0x00000004;
        }
        if (((from_bitField0_ & 0x00000080) != 0)) {
          result.finishBegin_ = finishBegin_;
          to_bitField0_ |= 0x00000008;
        }
        if (((from_bitField0_ & 0x00000100) != 0)) {
          result.finishEnd_ = finishEnd_;
          to_bitField0_ |= 0x00000010;
        }
        if (((from_bitField0_ & 0x00000200) != 0)) {
          applicationTags_.makeImmutable();
          result.applicationTags_ = applicationTags_;
        }
        if (((from_bitField0_ & 0x00000400) != 0)) {
          result.scope_ = scope_;
          to_bitField0_ |= 0x00000020;
        }
        if (((from_bitField0_ & 0x00000800) != 0)) {
          result.name_ = name_;
          to_bitField0_ |= 0x00000040;
        }
        result.bitField0_ |= to_bitField0_;
      }
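
      // Illustrative note (not part of the generated output): the builder and
      // the built message use different bit layouts. Builder bits 0x001-0x008
      // and 0x200 track the five repeated fields, which need no presence bits
      // in the message, so the seven optional-field has-bits are re-packed
      // densely from 0x01 (limit) through 0x40 (name) when copied across.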

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsRequestProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsRequestProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsRequestProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsRequestProto.getDefaultInstance()) return this;
        if (!other.applicationTypes_.isEmpty()) {
          if (applicationTypes_.isEmpty()) {
            applicationTypes_ = other.applicationTypes_;
            bitField0_ |= 0x00000001;
          } else {
            ensureApplicationTypesIsMutable();
            applicationTypes_.addAll(other.applicationTypes_);
          }
          onChanged();
        }
        if (!other.applicationStates_.isEmpty()) {
          if (applicationStates_.isEmpty()) {
            applicationStates_ = other.applicationStates_;
            bitField0_ = (bitField0_ & ~0x00000002);
          } else {
            ensureApplicationStatesIsMutable();
            applicationStates_.addAll(other.applicationStates_);
          }
          onChanged();
        }
        if (!other.users_.isEmpty()) {
          if (users_.isEmpty()) {
            users_ = other.users_;
            bitField0_ |= 0x00000004;
          } else {
            ensureUsersIsMutable();
            users_.addAll(other.users_);
          }
          onChanged();
        }
        if (!other.queues_.isEmpty()) {
          if (queues_.isEmpty()) {
            queues_ = other.queues_;
            bitField0_ |= 0x00000008;
          } else {
            ensureQueuesIsMutable();
            queues_.addAll(other.queues_);
          }
          onChanged();
        }
        if (other.hasLimit()) {
          setLimit(other.getLimit());
        }
        if (other.hasStartBegin()) {
          setStartBegin(other.getStartBegin());
        }
        if (other.hasStartEnd()) {
          setStartEnd(other.getStartEnd());
        }
        if (other.hasFinishBegin()) {
          setFinishBegin(other.getFinishBegin());
        }
        if (other.hasFinishEnd()) {
          setFinishEnd(other.getFinishEnd());
        }
        if (!other.applicationTags_.isEmpty()) {
          if (applicationTags_.isEmpty()) {
            applicationTags_ = other.applicationTags_;
            bitField0_ |= 0x00000200;
          } else {
            ensureApplicationTagsIsMutable();
            applicationTags_.addAll(other.applicationTags_);
          }
          onChanged();
        }
        if (other.hasScope()) {
          setScope(other.getScope());
        }
        if (other.hasName()) {
          name_ = other.name_;
          bitField0_ |= 0x00000800;
          onChanged();
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }
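
      // Illustrative note (not part of the generated output): merging follows
      // the usual protobuf convention. Repeated fields from `other` are
      // appended, with a fast path that adopts the other list wholesale when
      // the local list is still empty; optional scalars that are set in
      // `other` overwrite the local values.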

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes();
                ensureApplicationTypesIsMutable();
                applicationTypes_.add(bs);
                break;
              } // case 10
              case 16: {
                int tmpRaw = input.readEnum();
                org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationStateProto tmpValue =
                    org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationStateProto.forNumber(tmpRaw);
                if (tmpValue == null) {
                  mergeUnknownVarintField(2, tmpRaw);
                } else {
                  ensureApplicationStatesIsMutable();
                  applicationStates_.add(tmpRaw);
                }
                break;
              } // case 16
              case 18: {
                int length = input.readRawVarint32();
                int oldLimit = input.pushLimit(length);
                while (input.getBytesUntilLimit() > 0) {
                  int tmpRaw = input.readEnum();
                  org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationStateProto tmpValue =
                      org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationStateProto.forNumber(tmpRaw);
                  if (tmpValue == null) {
                    mergeUnknownVarintField(2, tmpRaw);
                  } else {
                    ensureApplicationStatesIsMutable();
                    applicationStates_.add(tmpRaw);
                  }
                }
                input.popLimit(oldLimit);
                break;
              } // case 18
              case 26: {
                org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes();
                ensureUsersIsMutable();
                users_.add(bs);
                break;
              } // case 26
              case 34: {
                org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes();
                ensureQueuesIsMutable();
                queues_.add(bs);
                break;
              } // case 34
              case 40: {
                limit_ = input.readInt64();
                bitField0_ |= 0x00000010;
                break;
              } // case 40
              case 48: {
                startBegin_ = input.readInt64();
                bitField0_ |= 0x00000020;
                break;
              } // case 48
              case 56: {
                startEnd_ = input.readInt64();
                bitField0_ |= 0x00000040;
                break;
              } // case 56
              case 64: {
                finishBegin_ = input.readInt64();
                bitField0_ |= 0x00000080;
                break;
              } // case 64
              case 72: {
                finishEnd_ = input.readInt64();
                bitField0_ |= 0x00000100;
                break;
              } // case 72
              case 82: {
                org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes();
                ensureApplicationTagsIsMutable();
                applicationTags_.add(bs);
                break;
              } // case 82
              case 88: {
                int tmpRaw = input.readEnum();
                org.apache.hadoop.yarn.proto.YarnServiceProtos.ApplicationsRequestScopeProto tmpValue =
                    org.apache.hadoop.yarn.proto.YarnServiceProtos.ApplicationsRequestScopeProto.forNumber(tmpRaw);
                if (tmpValue == null) {
                  mergeUnknownVarintField(11, tmpRaw);
                } else {
                  scope_ = tmpRaw;
                  bitField0_ |= 0x00000400;
                }
                break;
              } // case 88
              case 98: {
                name_ = input.readBytes();
                bitField0_ |= 0x00000800;
                break;
              } // case 98
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
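
      // Illustrative note (not part of the generated output): cases 16 and 18
      // both feed application_states. Tag 16 is the unpacked form (one varint
      // per element); tag 18 is the packed form (a single length-delimited run
      // of varints), which proto2 parsers accept even for fields declared
      // unpacked. Enum numbers this binary does not recognize are preserved
      // via mergeUnknownVarintField(2, ...) instead of being dropped.
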
      private int bitField0_;

      private org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList applicationTypes_ =
          org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
      private void ensureApplicationTypesIsMutable() {
        if (!applicationTypes_.isModifiable()) {
          applicationTypes_ = new org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList(applicationTypes_);
        }
        bitField0_ |= 0x00000001;
      }
      /**
       * <code>repeated string application_types = 1;</code>
       * @return A list containing the applicationTypes.
       */
      public org.apache.hadoop.thirdparty.protobuf.ProtocolStringList
          getApplicationTypesList() {
        applicationTypes_.makeImmutable();
        return applicationTypes_;
      }
      /**
       * <code>repeated string application_types = 1;</code>
       * @return The count of applicationTypes.
       */
      public int getApplicationTypesCount() {
        return applicationTypes_.size();
      }
      /**
       * <code>repeated string application_types = 1;</code>
       * @param index The index of the element to return.
       * @return The applicationTypes at the given index.
       */
      public java.lang.String getApplicationTypes(int index) {
        return applicationTypes_.get(index);
      }
      /**
       * <code>repeated string application_types = 1;</code>
       * @param index The index of the value to return.
       * @return The bytes of the applicationTypes at the given index.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getApplicationTypesBytes(int index) {
        return applicationTypes_.getByteString(index);
      }
      /**
       * <code>repeated string application_types = 1;</code>
       * @param index The index to set the value at.
       * @param value The applicationTypes to set.
       * @return This builder for chaining.
       */
      public Builder setApplicationTypes(
          int index, java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        ensureApplicationTypesIsMutable();
        applicationTypes_.set(index, value);
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>repeated string application_types = 1;</code>
       * @param value The applicationTypes to add.
       * @return This builder for chaining.
       */
      public Builder addApplicationTypes(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        ensureApplicationTypesIsMutable();
        applicationTypes_.add(value);
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>repeated string application_types = 1;</code>
       * @param values The applicationTypes to add.
       * @return This builder for chaining.
       */
      public Builder addAllApplicationTypes(
          java.lang.Iterable<java.lang.String> values) {
        ensureApplicationTypesIsMutable();
        org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
            values, applicationTypes_);
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>repeated string application_types = 1;</code>
       * @return This builder for chaining.
       */
      public Builder clearApplicationTypes() {
        applicationTypes_ =
          org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
        return this;
      }
      /**
       * <code>repeated string application_types = 1;</code>
       * @param value The bytes of the applicationTypes to add.
       * @return This builder for chaining.
       */
      public Builder addApplicationTypesBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        ensureApplicationTypesIsMutable();
        applicationTypes_.add(value);
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }

      private java.util.List<java.lang.Integer> applicationStates_ =
        java.util.Collections.emptyList();
      private void ensureApplicationStatesIsMutable() {
        if (!((bitField0_ & 0x00000002) != 0)) {
          applicationStates_ = new java.util.ArrayList<java.lang.Integer>(applicationStates_);
          bitField0_ |= 0x00000002;
        }
      }
      /**
       * <code>repeated .hadoop.yarn.YarnApplicationStateProto application_states = 2;</code>
       * @return A list containing the applicationStates.
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationStateProto> getApplicationStatesList() {
        return new org.apache.hadoop.thirdparty.protobuf.Internal.ListAdapter<
            java.lang.Integer, org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationStateProto>(applicationStates_, applicationStates_converter_);
      }
      /**
       * <code>repeated .hadoop.yarn.YarnApplicationStateProto application_states = 2;</code>
       * @return The count of applicationStates.
       */
      public int getApplicationStatesCount() {
        return applicationStates_.size();
      }
      /**
       * <code>repeated .hadoop.yarn.YarnApplicationStateProto application_states = 2;</code>
       * @param index The index of the element to return.
       * @return The applicationStates at the given index.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationStateProto getApplicationStates(int index) {
        return applicationStates_converter_.convert(applicationStates_.get(index));
      }
      /**
       * <code>repeated .hadoop.yarn.YarnApplicationStateProto application_states = 2;</code>
       * @param index The index to set the value at.
       * @param value The applicationStates to set.
       * @return This builder for chaining.
       */
      public Builder setApplicationStates(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationStateProto value) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureApplicationStatesIsMutable();
        applicationStates_.set(index, value.getNumber());
        onChanged();
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.YarnApplicationStateProto application_states = 2;</code>
       * @param value The applicationStates to add.
       * @return This builder for chaining.
       */
      public Builder addApplicationStates(org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationStateProto value) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureApplicationStatesIsMutable();
        applicationStates_.add(value.getNumber());
        onChanged();
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.YarnApplicationStateProto application_states = 2;</code>
       * @param values The applicationStates to add.
       * @return This builder for chaining.
       */
      public Builder addAllApplicationStates(
          java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationStateProto> values) {
        ensureApplicationStatesIsMutable();
        for (org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationStateProto value : values) {
          applicationStates_.add(value.getNumber());
        }
        onChanged();
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.YarnApplicationStateProto application_states = 2;</code>
       * @return This builder for chaining.
       */
      public Builder clearApplicationStates() {
        applicationStates_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000002);
        onChanged();
        return this;
      }

      private org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList users_ =
          org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
      private void ensureUsersIsMutable() {
        if (!users_.isModifiable()) {
          users_ = new org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList(users_);
        }
        bitField0_ |= 0x00000004;
      }
      /**
       * <code>repeated string users = 3;</code>
       * @return A list containing the users.
       */
      public org.apache.hadoop.thirdparty.protobuf.ProtocolStringList
          getUsersList() {
        users_.makeImmutable();
        return users_;
      }
      /**
       * <code>repeated string users = 3;</code>
       * @return The count of users.
       */
      public int getUsersCount() {
        return users_.size();
      }
      /**
       * <code>repeated string users = 3;</code>
       * @param index The index of the element to return.
       * @return The users at the given index.
       */
      public java.lang.String getUsers(int index) {
        return users_.get(index);
      }
      /**
       * <code>repeated string users = 3;</code>
       * @param index The index of the value to return.
       * @return The bytes of the users at the given index.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getUsersBytes(int index) {
        return users_.getByteString(index);
      }
      /**
       * <code>repeated string users = 3;</code>
       * @param index The index to set the value at.
       * @param value The users to set.
       * @return This builder for chaining.
       */
      public Builder setUsers(
          int index, java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        ensureUsersIsMutable();
        users_.set(index, value);
        bitField0_ |= 0x00000004;
        onChanged();
        return this;
      }
      /**
       * <code>repeated string users = 3;</code>
       * @param value The users to add.
       * @return This builder for chaining.
       */
      public Builder addUsers(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        ensureUsersIsMutable();
        users_.add(value);
        bitField0_ |= 0x00000004;
        onChanged();
        return this;
      }
      /**
       * <code>repeated string users = 3;</code>
       * @param values The users to add.
       * @return This builder for chaining.
       */
      public Builder addAllUsers(
          java.lang.Iterable<java.lang.String> values) {
        ensureUsersIsMutable();
        org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
            values, users_);
        bitField0_ |= 0x00000004;
        onChanged();
        return this;
      }
      /**
       * <code>repeated string users = 3;</code>
       * @return This builder for chaining.
       */
      public Builder clearUsers() {
        users_ =
          org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
        bitField0_ = (bitField0_ & ~0x00000004);
        onChanged();
        return this;
      }
      /**
       * <code>repeated string users = 3;</code>
       * @param value The bytes of the users to add.
       * @return This builder for chaining.
       */
      public Builder addUsersBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        ensureUsersIsMutable();
        users_.add(value);
        bitField0_ |= 0x00000004;
        onChanged();
        return this;
      }

      private org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList queues_ =
          org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
      private void ensureQueuesIsMutable() {
        if (!queues_.isModifiable()) {
          queues_ = new org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList(queues_);
        }
        bitField0_ |= 0x00000008;
      }
      /**
       * <code>repeated string queues = 4;</code>
       * @return A list containing the queues.
       */
      public org.apache.hadoop.thirdparty.protobuf.ProtocolStringList
          getQueuesList() {
        queues_.makeImmutable();
        return queues_;
      }
      /**
       * <code>repeated string queues = 4;</code>
       * @return The count of queues.
       */
      public int getQueuesCount() {
        return queues_.size();
      }
      /**
       * <code>repeated string queues = 4;</code>
       * @param index The index of the element to return.
       * @return The queues at the given index.
       */
      public java.lang.String getQueues(int index) {
        return queues_.get(index);
      }
      /**
       * <code>repeated string queues = 4;</code>
       * @param index The index of the value to return.
       * @return The bytes of the queues at the given index.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getQueuesBytes(int index) {
        return queues_.getByteString(index);
      }
      /**
       * <code>repeated string queues = 4;</code>
       * @param index The index to set the value at.
       * @param value The queues to set.
       * @return This builder for chaining.
       */
      public Builder setQueues(
          int index, java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        ensureQueuesIsMutable();
        queues_.set(index, value);
        bitField0_ |= 0x00000008;
        onChanged();
        return this;
      }
      /**
       * <code>repeated string queues = 4;</code>
       * @param value The queues to add.
       * @return This builder for chaining.
       */
      public Builder addQueues(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        ensureQueuesIsMutable();
        queues_.add(value);
        bitField0_ |= 0x00000008;
        onChanged();
        return this;
      }
      /**
       * <code>repeated string queues = 4;</code>
       * @param values The queues to add.
       * @return This builder for chaining.
       */
      public Builder addAllQueues(
          java.lang.Iterable<java.lang.String> values) {
        ensureQueuesIsMutable();
        org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
            values, queues_);
        bitField0_ |= 0x00000008;
        onChanged();
        return this;
      }
      /**
       * <code>repeated string queues = 4;</code>
       * @return This builder for chaining.
       */
      public Builder clearQueues() {
        queues_ =
          org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
        bitField0_ = (bitField0_ & ~0x00000008);
        onChanged();
        return this;
      }
      /**
       * <code>repeated string queues = 4;</code>
       * @param value The bytes of the queues to add.
       * @return This builder for chaining.
       */
      public Builder addQueuesBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        ensureQueuesIsMutable();
        queues_.add(value);
        bitField0_ |= 0x00000008;
        onChanged();
        return this;
      }

      private long limit_ ;
      /**
       * <code>optional int64 limit = 5;</code>
       * @return Whether the limit field is set.
       */
      @java.lang.Override
      public boolean hasLimit() {
        return ((bitField0_ & 0x00000010) != 0);
      }
      /**
       * <code>optional int64 limit = 5;</code>
       * @return The limit.
       */
      @java.lang.Override
      public long getLimit() {
        return limit_;
      }
      /**
       * <code>optional int64 limit = 5;</code>
       * @param value The limit to set.
       * @return This builder for chaining.
       */
      public Builder setLimit(long value) {
        limit_ = value;
        bitField0_ |= 0x00000010;
        onChanged();
        return this;
      }
      /**
       * <code>optional int64 limit = 5;</code>
       * @return This builder for chaining.
       */
      public Builder clearLimit() {
        bitField0_ = (bitField0_ & ~0x00000010);
        limit_ = 0L;
        onChanged();
        return this;
      }

      private long startBegin_ ;
      /**
       * <code>optional int64 start_begin = 6;</code>
       * @return Whether the startBegin field is set.
       */
      @java.lang.Override
      public boolean hasStartBegin() {
        return ((bitField0_ & 0x00000020) != 0);
      }
      /**
       * <code>optional int64 start_begin = 6;</code>
       * @return The startBegin.
       */
      @java.lang.Override
      public long getStartBegin() {
        return startBegin_;
      }
      /**
       * <code>optional int64 start_begin = 6;</code>
       * @param value The startBegin to set.
       * @return This builder for chaining.
       */
      public Builder setStartBegin(long value) {
        startBegin_ = value;
        bitField0_ |= 0x00000020;
        onChanged();
        return this;
      }
      /**
       * <code>optional int64 start_begin = 6;</code>
       * @return This builder for chaining.
       */
      public Builder clearStartBegin() {
        bitField0_ = (bitField0_ & ~0x00000020);
        startBegin_ = 0L;
        onChanged();
        return this;
      }

      private long startEnd_ ;
      /**
       * <code>optional int64 start_end = 7;</code>
       * @return Whether the startEnd field is set.
       */
      @java.lang.Override
      public boolean hasStartEnd() {
        return ((bitField0_ & 0x00000040) != 0);
      }
      /**
       * <code>optional int64 start_end = 7;</code>
       * @return The startEnd.
       */
      @java.lang.Override
      public long getStartEnd() {
        return startEnd_;
      }
      /**
       * <code>optional int64 start_end = 7;</code>
       * @param value The startEnd to set.
       * @return This builder for chaining.
       */
      public Builder setStartEnd(long value) {
        startEnd_ = value;
        bitField0_ |= 0x00000040;
        onChanged();
        return this;
      }
      /**
       * <code>optional int64 start_end = 7;</code>
       * @return This builder for chaining.
       */
      public Builder clearStartEnd() {
        bitField0_ = (bitField0_ & ~0x00000040);
        startEnd_ = 0L;
        onChanged();
        return this;
      }

      private long finishBegin_ ;
      /**
       * <code>optional int64 finish_begin = 8;</code>
       * @return Whether the finishBegin field is set.
       */
      @java.lang.Override
      public boolean hasFinishBegin() {
        return ((bitField0_ & 0x00000080) != 0);
      }
      /**
       * <code>optional int64 finish_begin = 8;</code>
       * @return The finishBegin.
       */
      @java.lang.Override
      public long getFinishBegin() {
        return finishBegin_;
      }
      /**
       * <code>optional int64 finish_begin = 8;</code>
       * @param value The finishBegin to set.
       * @return This builder for chaining.
       */
      public Builder setFinishBegin(long value) {
        finishBegin_ = value;
        bitField0_ |= 0x00000080;
        onChanged();
        return this;
      }
      /**
       * <code>optional int64 finish_begin = 8;</code>
       * @return This builder for chaining.
       */
      public Builder clearFinishBegin() {
        bitField0_ = (bitField0_ & ~0x00000080);
        finishBegin_ = 0L;
        onChanged();
        return this;
      }

      private long finishEnd_ ;
      /**
       * <code>optional int64 finish_end = 9;</code>
       * @return Whether the finishEnd field is set.
       */
      @java.lang.Override
      public boolean hasFinishEnd() {
        return ((bitField0_ & 0x00000100) != 0);
      }
      /**
       * <code>optional int64 finish_end = 9;</code>
       * @return The finishEnd.
       */
      @java.lang.Override
      public long getFinishEnd() {
        return finishEnd_;
      }
      /**
       * <code>optional int64 finish_end = 9;</code>
       * @param value The finishEnd to set.
       * @return This builder for chaining.
       */
      public Builder setFinishEnd(long value) {
        finishEnd_ = value;
        bitField0_ |= 0x00000100;
        onChanged();
        return this;
      }
      /**
       * <code>optional int64 finish_end = 9;</code>
       * @return This builder for chaining.
       */
      public Builder clearFinishEnd() {
        bitField0_ = (bitField0_ & ~0x00000100);
        finishEnd_ = 0L;
        onChanged();
        return this;
      }

      private org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList applicationTags_ =
          org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
      private void ensureApplicationTagsIsMutable() {
        if (!applicationTags_.isModifiable()) {
          applicationTags_ = new org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList(applicationTags_);
        }
        bitField0_ |= 0x00000200;
      }
      /**
       * <code>repeated string applicationTags = 10;</code>
       * @return A list containing the applicationTags.
       */
      public org.apache.hadoop.thirdparty.protobuf.ProtocolStringList
          getApplicationTagsList() {
        applicationTags_.makeImmutable();
        return applicationTags_;
      }
      /**
       * <code>repeated string applicationTags = 10;</code>
       * @return The count of applicationTags.
       */
      public int getApplicationTagsCount() {
        return applicationTags_.size();
      }
      /**
       * <code>repeated string applicationTags = 10;</code>
       * @param index The index of the element to return.
       * @return The applicationTags at the given index.
       */
      public java.lang.String getApplicationTags(int index) {
        return applicationTags_.get(index);
      }
      /**
       * <code>repeated string applicationTags = 10;</code>
       * @param index The index of the value to return.
       * @return The bytes of the applicationTags at the given index.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getApplicationTagsBytes(int index) {
        return applicationTags_.getByteString(index);
      }
      /**
       * <code>repeated string applicationTags = 10;</code>
       * @param index The index to set the value at.
       * @param value The applicationTags to set.
       * @return This builder for chaining.
       */
      public Builder setApplicationTags(
          int index, java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        ensureApplicationTagsIsMutable();
        applicationTags_.set(index, value);
        bitField0_ |= 0x00000200;
        onChanged();
        return this;
      }
      /**
       * <code>repeated string applicationTags = 10;</code>
       * @param value The applicationTags to add.
       * @return This builder for chaining.
       */
      public Builder addApplicationTags(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        ensureApplicationTagsIsMutable();
        applicationTags_.add(value);
        bitField0_ |= 0x00000200;
        onChanged();
        return this;
      }
      /**
       * <code>repeated string applicationTags = 10;</code>
       * @param values The applicationTags to add.
       * @return This builder for chaining.
       */
      public Builder addAllApplicationTags(
          java.lang.Iterable<java.lang.String> values) {
        ensureApplicationTagsIsMutable();
        org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
            values, applicationTags_);
        bitField0_ |= 0x00000200;
        onChanged();
        return this;
      }
      /**
       * <code>repeated string applicationTags = 10;</code>
       * @return This builder for chaining.
       */
      public Builder clearApplicationTags() {
        applicationTags_ =
          org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
        bitField0_ = (bitField0_ & ~0x00000200);
        onChanged();
        return this;
      }
      /**
       * <code>repeated string applicationTags = 10;</code>
       * @param value The bytes of the applicationTags to add.
       * @return This builder for chaining.
       */
      public Builder addApplicationTagsBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        ensureApplicationTagsIsMutable();
        applicationTags_.add(value);
        bitField0_ |= 0x00000200;
        onChanged();
        return this;
      }

      private int scope_ = 0;
      /**
       * <code>optional .hadoop.yarn.ApplicationsRequestScopeProto scope = 11 [default = ALL];</code>
       * @return Whether the scope field is set.
       */
      @java.lang.Override public boolean hasScope() {
        return ((bitField0_ & 0x00000400) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationsRequestScopeProto scope = 11 [default = ALL];</code>
       * @return The scope.
       */
      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.ApplicationsRequestScopeProto getScope() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.ApplicationsRequestScopeProto result = org.apache.hadoop.yarn.proto.YarnServiceProtos.ApplicationsRequestScopeProto.forNumber(scope_);
        return result == null ? org.apache.hadoop.yarn.proto.YarnServiceProtos.ApplicationsRequestScopeProto.ALL : result;
      }
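      // Note: forNumber(...) returns null for an unrecognized wire value, so
      // getScope() falls back to the proto-declared default, ALL.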
      /**
       * <code>optional .hadoop.yarn.ApplicationsRequestScopeProto scope = 11 [default = ALL];</code>
       * @param value The scope to set.
       * @return This builder for chaining.
       */
      public Builder setScope(org.apache.hadoop.yarn.proto.YarnServiceProtos.ApplicationsRequestScopeProto value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000400;
        scope_ = value.getNumber();
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationsRequestScopeProto scope = 11 [default = ALL];</code>
       * @return This builder for chaining.
       */
      public Builder clearScope() {
        bitField0_ = (bitField0_ & ~0x00000400);
        scope_ = 0;
        onChanged();
        return this;
      }

      private java.lang.Object name_ = "";
      /**
       * <code>optional string name = 12;</code>
       * @return Whether the name field is set.
       */
      public boolean hasName() {
        return ((bitField0_ & 0x00000800) != 0);
      }
      /**
       * <code>optional string name = 12;</code>
       * @return The name.
       */
      public java.lang.String getName() {
        java.lang.Object ref = name_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            name_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
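      // Note: getName() lazily decodes a ByteString field and, when the bytes
      // are valid UTF-8, caches the decoded String back into name_ so later
      // calls avoid re-decoding.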
      /**
       * <code>optional string name = 12;</code>
       * @return The bytes for name.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getNameBytes() {
        java.lang.Object ref = name_;
        if (ref instanceof java.lang.String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b =
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          name_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string name = 12;</code>
       * @param value The name to set.
       * @return This builder for chaining.
       */
      public Builder setName(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        name_ = value;
        bitField0_ |= 0x00000800;
        onChanged();
        return this;
      }
      /**
       * <code>optional string name = 12;</code>
       * @return This builder for chaining.
       */
      public Builder clearName() {
        name_ = getDefaultInstance().getName();
        bitField0_ = (bitField0_ & ~0x00000800);
        onChanged();
        return this;
      }
      /**
       * <code>optional string name = 12;</code>
       * @param value The bytes for name to set.
       * @return This builder for chaining.
       */
      public Builder setNameBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        name_ = value;
        bitField0_ |= 0x00000800;
        onChanged();
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.GetApplicationsRequestProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.GetApplicationsRequestProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsRequestProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsRequestProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsRequestProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<GetApplicationsRequestProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<GetApplicationsRequestProto>() {
      @java.lang.Override
      public GetApplicationsRequestProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
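
    // Note: parsePartialFrom returns buildPartial(), so missing required
    // fields do not fail at this point; parse errors are re-thrown with the
    // partially built message attached via setUnfinishedMessage(...).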

    public static org.apache.hadoop.thirdparty.protobuf.Parser<GetApplicationsRequestProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<GetApplicationsRequestProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsRequestProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
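
  // Usage sketch (illustrative, not generated code): assembling a request with
  // the builder above. The filter values ("MAPREDUCE", "alice", 100) are
  // assumptions for the example only.
  //
  //   GetApplicationsRequestProto request =
  //       GetApplicationsRequestProto.newBuilder()
  //           .addApplicationTypes("MAPREDUCE")
  //           .addApplicationStates(
  //               org.apache.hadoop.yarn.proto.YarnProtos
  //                   .YarnApplicationStateProto.RUNNING)
  //           .addUsers("alice")
  //           .setLimit(100L)
  //           .build();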

  public interface GetApplicationsResponseProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.GetApplicationsResponseProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>repeated .hadoop.yarn.ApplicationReportProto applications = 1;</code>
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto> 
        getApplicationsList();
    /**
     * <code>repeated .hadoop.yarn.ApplicationReportProto applications = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto getApplications(int index);
    /**
     * <code>repeated .hadoop.yarn.ApplicationReportProto applications = 1;</code>
     */
    int getApplicationsCount();
    /**
     * <code>repeated .hadoop.yarn.ApplicationReportProto applications = 1;</code>
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProtoOrBuilder> 
        getApplicationsOrBuilderList();
    /**
     * <code>repeated .hadoop.yarn.ApplicationReportProto applications = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProtoOrBuilder getApplicationsOrBuilder(
        int index);
  }
  /**
   * Protobuf type {@code hadoop.yarn.GetApplicationsResponseProto}
   */
  public static final class GetApplicationsResponseProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.GetApplicationsResponseProto)
      GetApplicationsResponseProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use GetApplicationsResponseProto.newBuilder() to construct.
    private GetApplicationsResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private GetApplicationsResponseProto() {
      applications_ = java.util.Collections.emptyList();
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new GetApplicationsResponseProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetApplicationsResponseProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetApplicationsResponseProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsResponseProto.Builder.class);
    }

    public static final int APPLICATIONS_FIELD_NUMBER = 1;
    @SuppressWarnings("serial")
    private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto> applications_;
    /**
     * <code>repeated .hadoop.yarn.ApplicationReportProto applications = 1;</code>
     */
    @java.lang.Override
    public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto> getApplicationsList() {
      return applications_;
    }
    /**
     * <code>repeated .hadoop.yarn.ApplicationReportProto applications = 1;</code>
     */
    @java.lang.Override
    public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProtoOrBuilder> 
        getApplicationsOrBuilderList() {
      return applications_;
    }
    /**
     * <code>repeated .hadoop.yarn.ApplicationReportProto applications = 1;</code>
     */
    @java.lang.Override
    public int getApplicationsCount() {
      return applications_.size();
    }
    /**
     * <code>repeated .hadoop.yarn.ApplicationReportProto applications = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto getApplications(int index) {
      return applications_.get(index);
    }
    /**
     * <code>repeated .hadoop.yarn.ApplicationReportProto applications = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProtoOrBuilder getApplicationsOrBuilder(
        int index) {
      return applications_.get(index);
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      for (int i = 0; i < getApplicationsCount(); i++) {
        if (!getApplications(i).isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }
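    // Note: memoizedIsInitialized is a tri-state cache: -1 means "not yet
    // computed", 0 means "known uninitialized", 1 means "known initialized".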

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      for (int i = 0; i < applications_.size(); i++) {
        output.writeMessage(1, applications_.get(i));
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      for (int i = 0; i < applications_.size(); i++) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, applications_.get(i));
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }
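
    // Note: getSerializedSize() memoizes its result in memoizedSize (inherited
    // from the base message class, where it starts at -1), so repeated calls
    // return the cached value.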

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsResponseProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsResponseProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsResponseProto) obj;

      if (!getApplicationsList()
          .equals(other.getApplicationsList())) return false;
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (getApplicationsCount() > 0) {
        hash = (37 * hash) + APPLICATIONS_FIELD_NUMBER;
        hash = (53 * hash) + getApplicationsList().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsResponseProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsResponseProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsResponseProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsResponseProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsResponseProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsResponseProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsResponseProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsResponseProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }
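
    // Usage sketch (illustrative, not generated code): a minimal round trip
    // through the parse methods above; "wireBytes" is an assumed byte[]
    // produced by toByteArray() on a GetApplicationsResponseProto.
    //
    //   GetApplicationsResponseProto resp =
    //       GetApplicationsResponseProto.parseFrom(wireBytes);
    //   for (org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto r
    //       : resp.getApplicationsList()) {
    //     // inspect each application report
    //   }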

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsResponseProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.GetApplicationsResponseProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.GetApplicationsResponseProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsResponseProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetApplicationsResponseProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetApplicationsResponseProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsResponseProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsResponseProto.newBuilder()
      private Builder() {
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        if (applicationsBuilder_ == null) {
          applications_ = java.util.Collections.emptyList();
        } else {
          applications_ = null;
          applicationsBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetApplicationsResponseProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsResponseProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsResponseProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsResponseProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsResponseProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsResponseProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsResponseProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsResponseProto(this);
        buildPartialRepeatedFields(result);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartialRepeatedFields(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsResponseProto result) {
        if (applicationsBuilder_ == null) {
          if (((bitField0_ & 0x00000001) != 0)) {
            applications_ = java.util.Collections.unmodifiableList(applications_);
            bitField0_ = (bitField0_ & ~0x00000001);
          }
          result.applications_ = applications_;
        } else {
          result.applications_ = applicationsBuilder_.build();
        }
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsResponseProto result) {
        // No singular fields on this message; the repeated applications field
        // is populated in buildPartialRepeatedFields(result).
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsResponseProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsResponseProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsResponseProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsResponseProto.getDefaultInstance()) return this;
        if (applicationsBuilder_ == null) {
          if (!other.applications_.isEmpty()) {
            if (applications_.isEmpty()) {
              applications_ = other.applications_;
              bitField0_ = (bitField0_ & ~0x00000001);
            } else {
              ensureApplicationsIsMutable();
              applications_.addAll(other.applications_);
            }
            onChanged();
          }
        } else {
          if (!other.applications_.isEmpty()) {
            if (applicationsBuilder_.isEmpty()) {
              applicationsBuilder_.dispose();
              applicationsBuilder_ = null;
              applications_ = other.applications_;
              bitField0_ = (bitField0_ & ~0x00000001);
              applicationsBuilder_ = 
                org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                   getApplicationsFieldBuilder() : null;
            } else {
              applicationsBuilder_.addAllMessages(other.applications_);
            }
          }
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }
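
      // Note: when this builder's own list is still empty, mergeFrom adopts
      // the other message's immutable list directly and clears the mutable
      // bit; a defensive copy happens later, in ensureApplicationsIsMutable(),
      // only if the list is subsequently modified.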

      @java.lang.Override
      public final boolean isInitialized() {
        for (int i = 0; i < getApplicationsCount(); i++) {
          if (!getApplications(i).isInitialized()) {
            return false;
          }
        }
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto m =
                    input.readMessage(
                        org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto.PARSER,
                        extensionRegistry);
                if (applicationsBuilder_ == null) {
                  ensureApplicationsIsMutable();
                  applications_.add(m);
                } else {
                  applicationsBuilder_.addMessage(m);
                }
                break;
              } // case 10
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
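      // Note: in the switch above, "case 10" matches field 1 with wire type 2
      // (length-delimited): a protobuf tag is (field_number << 3) | wire_type,
      // so (1 << 3) | 2 == 10. A tag of 0 signals end of input.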
      private int bitField0_;

      private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto> applications_ =
        java.util.Collections.emptyList();
      private void ensureApplicationsIsMutable() {
        if (!((bitField0_ & 0x00000001) != 0)) {
          applications_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto>(applications_);
          bitField0_ |= 0x00000001;
        }
      }

      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProtoOrBuilder> applicationsBuilder_;

      /**
       * <code>repeated .hadoop.yarn.ApplicationReportProto applications = 1;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto> getApplicationsList() {
        if (applicationsBuilder_ == null) {
          return java.util.Collections.unmodifiableList(applications_);
        } else {
          return applicationsBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationReportProto applications = 1;</code>
       */
      public int getApplicationsCount() {
        if (applicationsBuilder_ == null) {
          return applications_.size();
        } else {
          return applicationsBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationReportProto applications = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto getApplications(int index) {
        if (applicationsBuilder_ == null) {
          return applications_.get(index);
        } else {
          return applicationsBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationReportProto applications = 1;</code>
       */
      public Builder setApplications(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto value) {
        if (applicationsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureApplicationsIsMutable();
          applications_.set(index, value);
          onChanged();
        } else {
          applicationsBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationReportProto applications = 1;</code>
       */
      public Builder setApplications(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto.Builder builderForValue) {
        if (applicationsBuilder_ == null) {
          ensureApplicationsIsMutable();
          applications_.set(index, builderForValue.build());
          onChanged();
        } else {
          applicationsBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationReportProto applications = 1;</code>
       */
      public Builder addApplications(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto value) {
        if (applicationsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureApplicationsIsMutable();
          applications_.add(value);
          onChanged();
        } else {
          applicationsBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationReportProto applications = 1;</code>
       */
      public Builder addApplications(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto value) {
        if (applicationsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureApplicationsIsMutable();
          applications_.add(index, value);
          onChanged();
        } else {
          applicationsBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationReportProto applications = 1;</code>
       */
      public Builder addApplications(
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto.Builder builderForValue) {
        if (applicationsBuilder_ == null) {
          ensureApplicationsIsMutable();
          applications_.add(builderForValue.build());
          onChanged();
        } else {
          applicationsBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationReportProto applications = 1;</code>
       */
      public Builder addApplications(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto.Builder builderForValue) {
        if (applicationsBuilder_ == null) {
          ensureApplicationsIsMutable();
          applications_.add(index, builderForValue.build());
          onChanged();
        } else {
          applicationsBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationReportProto applications = 1;</code>
       */
      public Builder addAllApplications(
          java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto> values) {
        if (applicationsBuilder_ == null) {
          ensureApplicationsIsMutable();
          org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
              values, applications_);
          onChanged();
        } else {
          applicationsBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationReportProto applications = 1;</code>
       */
      public Builder clearApplications() {
        if (applicationsBuilder_ == null) {
          applications_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000001);
          onChanged();
        } else {
          applicationsBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationReportProto applications = 1;</code>
       */
      public Builder removeApplications(int index) {
        if (applicationsBuilder_ == null) {
          ensureApplicationsIsMutable();
          applications_.remove(index);
          onChanged();
        } else {
          applicationsBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationReportProto applications = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto.Builder getApplicationsBuilder(
          int index) {
        return getApplicationsFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationReportProto applications = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProtoOrBuilder getApplicationsOrBuilder(
          int index) {
        if (applicationsBuilder_ == null) {
          return applications_.get(index);
        } else {
          return applicationsBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationReportProto applications = 1;</code>
       */
      public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProtoOrBuilder> 
           getApplicationsOrBuilderList() {
        if (applicationsBuilder_ != null) {
          return applicationsBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(applications_);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationReportProto applications = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto.Builder addApplicationsBuilder() {
        return getApplicationsFieldBuilder().addBuilder(
            org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationReportProto applications = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto.Builder addApplicationsBuilder(
          int index) {
        return getApplicationsFieldBuilder().addBuilder(
            index, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationReportProto applications = 1;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto.Builder> 
           getApplicationsBuilderList() {
        return getApplicationsFieldBuilder().getBuilderList();
      }
      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProtoOrBuilder> 
          getApplicationsFieldBuilder() {
        if (applicationsBuilder_ == null) {
          applicationsBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProtoOrBuilder>(
                  applications_,
                  ((bitField0_ & 0x00000001) != 0),
                  getParentForChildren(),
                  isClean());
          applications_ = null;
        }
        return applicationsBuilder_;
      }
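      // Note: once created, the RepeatedFieldBuilderV3 takes ownership of the
      // seeded applications_ list, so the plain-list field is nulled and all
      // subsequent access goes through the field builder.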
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.GetApplicationsResponseProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.GetApplicationsResponseProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsResponseProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsResponseProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsResponseProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<GetApplicationsResponseProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<GetApplicationsResponseProto>() {
      @java.lang.Override
      public GetApplicationsResponseProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
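    // Note (explanatory comment, not emitted by protoc): parsePartialFrom
    // attaches the partially-built message to any parse failure via
    // setUnfinishedMessage(builder.buildPartial()), so callers can still
    // inspect whatever fields were decoded before the error occurred.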

    public static org.apache.hadoop.thirdparty.protobuf.Parser<GetApplicationsResponseProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<GetApplicationsResponseProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsResponseProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
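  // --------------------------------------------------------------------
  // Illustrative usage sketch (hand-written, not generated): a typical
  // deserialize-and-read round trip for GetApplicationsResponseProto.
  // "rawBytes" is a hypothetical placeholder for wire-format bytes
  // received over an RPC channel.
  //
  //   byte[] rawBytes = ...;
  //   YarnServiceProtos.GetApplicationsResponseProto resp =
  //       YarnServiceProtos.GetApplicationsResponseProto.parseFrom(rawBytes);
  //   for (YarnProtos.ApplicationReportProto report : resp.getApplicationsList()) {
  //     // inspect each application report here
  //   }
  // --------------------------------------------------------------------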

  public interface GetClusterNodesRequestProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.GetClusterNodesRequestProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>repeated .hadoop.yarn.NodeStateProto nodeStates = 1;</code>
     * @return A list containing the nodeStates.
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeStateProto> getNodeStatesList();
    /**
     * <code>repeated .hadoop.yarn.NodeStateProto nodeStates = 1;</code>
     * @return The count of nodeStates.
     */
    int getNodeStatesCount();
    /**
     * <code>repeated .hadoop.yarn.NodeStateProto nodeStates = 1;</code>
     * @param index The index of the element to return.
     * @return The nodeStates at the given index.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.NodeStateProto getNodeStates(int index);
  }
  /**
   * Protobuf type {@code hadoop.yarn.GetClusterNodesRequestProto}
   */
  public static final class GetClusterNodesRequestProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.GetClusterNodesRequestProto)
      GetClusterNodesRequestProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use GetClusterNodesRequestProto.newBuilder() to construct.
    private GetClusterNodesRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private GetClusterNodesRequestProto() {
      nodeStates_ = java.util.Collections.emptyList();
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new GetClusterNodesRequestProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetClusterNodesRequestProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetClusterNodesRequestProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesRequestProto.Builder.class);
    }

    public static final int NODESTATES_FIELD_NUMBER = 1;
    @SuppressWarnings("serial")
    private java.util.List<java.lang.Integer> nodeStates_;
    private static final org.apache.hadoop.thirdparty.protobuf.Internal.ListAdapter.Converter<
        java.lang.Integer, org.apache.hadoop.yarn.proto.YarnProtos.NodeStateProto> nodeStates_converter_ =
            new org.apache.hadoop.thirdparty.protobuf.Internal.ListAdapter.Converter<
                java.lang.Integer, org.apache.hadoop.yarn.proto.YarnProtos.NodeStateProto>() {
              public org.apache.hadoop.yarn.proto.YarnProtos.NodeStateProto convert(java.lang.Integer from) {
                org.apache.hadoop.yarn.proto.YarnProtos.NodeStateProto result = org.apache.hadoop.yarn.proto.YarnProtos.NodeStateProto.forNumber(from);
                return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.NodeStateProto.NS_NEW : result;
              }
            };
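    // Note (explanatory comment, not emitted by protoc): repeated enum
    // fields are stored as raw java.lang.Integer wire numbers; this
    // converter maps them back to NodeStateProto on access, substituting
    // NS_NEW for any number that no longer maps to a known enum value.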
    /**
     * <code>repeated .hadoop.yarn.NodeStateProto nodeStates = 1;</code>
     * @return A list containing the nodeStates.
     */
    @java.lang.Override
    public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeStateProto> getNodeStatesList() {
      return new org.apache.hadoop.thirdparty.protobuf.Internal.ListAdapter<
          java.lang.Integer, org.apache.hadoop.yarn.proto.YarnProtos.NodeStateProto>(nodeStates_, nodeStates_converter_);
    }
    /**
     * <code>repeated .hadoop.yarn.NodeStateProto nodeStates = 1;</code>
     * @return The count of nodeStates.
     */
    @java.lang.Override
    public int getNodeStatesCount() {
      return nodeStates_.size();
    }
    /**
     * <code>repeated .hadoop.yarn.NodeStateProto nodeStates = 1;</code>
     * @param index The index of the element to return.
     * @return The nodeStates at the given index.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.NodeStateProto getNodeStates(int index) {
      return nodeStates_converter_.convert(nodeStates_.get(index));
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      for (int i = 0; i < nodeStates_.size(); i++) {
        output.writeEnum(1, nodeStates_.get(i));
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      {
        int dataSize = 0;
        for (int i = 0; i < nodeStates_.size(); i++) {
          dataSize += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
            .computeEnumSizeNoTag(nodeStates_.get(i));
        }
        size += dataSize;
        size += 1 * nodeStates_.size();  // one tag byte per (unpacked) enum entry
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesRequestProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesRequestProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesRequestProto) obj;

      if (!nodeStates_.equals(other.nodeStates_)) return false;
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (getNodeStatesCount() > 0) {
        hash = (37 * hash) + NODESTATES_FIELD_NUMBER;
        hash = (53 * hash) + nodeStates_.hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesRequestProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesRequestProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesRequestProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesRequestProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesRequestProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesRequestProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesRequestProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesRequestProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesRequestProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.GetClusterNodesRequestProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.GetClusterNodesRequestProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesRequestProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetClusterNodesRequestProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetClusterNodesRequestProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesRequestProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesRequestProto.newBuilder()
      private Builder() {

      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);

      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        nodeStates_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetClusterNodesRequestProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesRequestProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesRequestProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesRequestProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesRequestProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesRequestProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesRequestProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesRequestProto(this);
        buildPartialRepeatedFields(result);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartialRepeatedFields(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesRequestProto result) {
        if (((bitField0_ & 0x00000001) != 0)) {
          nodeStates_ = java.util.Collections.unmodifiableList(nodeStates_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.nodeStates_ = nodeStates_;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesRequestProto result) {
        // No singular fields to populate; the repeated nodeStates_ field
        // is copied in buildPartialRepeatedFields above.
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesRequestProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesRequestProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesRequestProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesRequestProto.getDefaultInstance()) return this;
        if (!other.nodeStates_.isEmpty()) {
          if (nodeStates_.isEmpty()) {
            nodeStates_ = other.nodeStates_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureNodeStatesIsMutable();
            nodeStates_.addAll(other.nodeStates_);
          }
          onChanged();
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 8: {
                int tmpRaw = input.readEnum();
                org.apache.hadoop.yarn.proto.YarnProtos.NodeStateProto tmpValue =
                    org.apache.hadoop.yarn.proto.YarnProtos.NodeStateProto.forNumber(tmpRaw);
                if (tmpValue == null) {
                  mergeUnknownVarintField(1, tmpRaw);
                } else {
                  ensureNodeStatesIsMutable();
                  nodeStates_.add(tmpRaw);
                }
                break;
              } // case 8
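              // Note (explanatory comment, not emitted by protoc):
              // case 8 above decodes a single unpacked enum entry, while
              // case 10 below decodes a packed run of varints sharing one
              // length-delimited tag; unknown enum numbers are preserved
              // in the unknown-field set instead of being dropped.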
              case 10: {
                int length = input.readRawVarint32();
                int oldLimit = input.pushLimit(length);
                while(input.getBytesUntilLimit() > 0) {
                  int tmpRaw = input.readEnum();
                  org.apache.hadoop.yarn.proto.YarnProtos.NodeStateProto tmpValue =
                      org.apache.hadoop.yarn.proto.YarnProtos.NodeStateProto.forNumber(tmpRaw);
                  if (tmpValue == null) {
                    mergeUnknownVarintField(1, tmpRaw);
                  } else {
                    ensureNodeStatesIsMutable();
                    nodeStates_.add(tmpRaw);
                  }
                }
                input.popLimit(oldLimit);
                break;
              } // case 10
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

      private java.util.List<java.lang.Integer> nodeStates_ =
        java.util.Collections.emptyList();
      private void ensureNodeStatesIsMutable() {
        if (!((bitField0_ & 0x00000001) != 0)) {
          nodeStates_ = new java.util.ArrayList<java.lang.Integer>(nodeStates_);
          bitField0_ |= 0x00000001;
        }
      }
      /**
       * <code>repeated .hadoop.yarn.NodeStateProto nodeStates = 1;</code>
       * @return A list containing the nodeStates.
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeStateProto> getNodeStatesList() {
        return new org.apache.hadoop.thirdparty.protobuf.Internal.ListAdapter<
            java.lang.Integer, org.apache.hadoop.yarn.proto.YarnProtos.NodeStateProto>(nodeStates_, nodeStates_converter_);
      }
      /**
       * <code>repeated .hadoop.yarn.NodeStateProto nodeStates = 1;</code>
       * @return The count of nodeStates.
       */
      public int getNodeStatesCount() {
        return nodeStates_.size();
      }
      /**
       * <code>repeated .hadoop.yarn.NodeStateProto nodeStates = 1;</code>
       * @param index The index of the element to return.
       * @return The nodeStates at the given index.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeStateProto getNodeStates(int index) {
        return nodeStates_converter_.convert(nodeStates_.get(index));
      }
      /**
       * <code>repeated .hadoop.yarn.NodeStateProto nodeStates = 1;</code>
       * @param index The index to set the value at.
       * @param value The nodeStates to set.
       * @return This builder for chaining.
       */
      public Builder setNodeStates(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeStateProto value) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureNodeStatesIsMutable();
        nodeStates_.set(index, value.getNumber());
        onChanged();
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeStateProto nodeStates = 1;</code>
       * @param value The nodeStates to add.
       * @return This builder for chaining.
       */
      public Builder addNodeStates(org.apache.hadoop.yarn.proto.YarnProtos.NodeStateProto value) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureNodeStatesIsMutable();
        nodeStates_.add(value.getNumber());
        onChanged();
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeStateProto nodeStates = 1;</code>
       * @param values The nodeStates to add.
       * @return This builder for chaining.
       */
      public Builder addAllNodeStates(
          java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnProtos.NodeStateProto> values) {
        ensureNodeStatesIsMutable();
        for (org.apache.hadoop.yarn.proto.YarnProtos.NodeStateProto value : values) {
          nodeStates_.add(value.getNumber());
        }
        onChanged();
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeStateProto nodeStates = 1;</code>
       * @return This builder for chaining.
       */
      public Builder clearNodeStates() {
        nodeStates_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.GetClusterNodesRequestProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.GetClusterNodesRequestProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesRequestProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesRequestProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesRequestProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<GetClusterNodesRequestProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<GetClusterNodesRequestProto>() {
      @java.lang.Override
      public GetClusterNodesRequestProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<GetClusterNodesRequestProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<GetClusterNodesRequestProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesRequestProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
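  // --------------------------------------------------------------------
  // Illustrative usage sketch (hand-written, not generated): building a
  // GetClusterNodesRequestProto that filters on a node state. NS_NEW is
  // used here only because it appears in this file as the converter's
  // fallback value; any NodeStateProto constant works the same way.
  //
  //   YarnServiceProtos.GetClusterNodesRequestProto req =
  //       YarnServiceProtos.GetClusterNodesRequestProto.newBuilder()
  //           .addNodeStates(YarnProtos.NodeStateProto.NS_NEW)
  //           .build();
  //   byte[] wire = req.toByteArray();
  // --------------------------------------------------------------------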

  public interface GetClusterNodesResponseProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.GetClusterNodesResponseProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>repeated .hadoop.yarn.NodeReportProto nodeReports = 1;</code>
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto> 
        getNodeReportsList();
    /**
     * <code>repeated .hadoop.yarn.NodeReportProto nodeReports = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto getNodeReports(int index);
    /**
     * <code>repeated .hadoop.yarn.NodeReportProto nodeReports = 1;</code>
     */
    int getNodeReportsCount();
    /**
     * <code>repeated .hadoop.yarn.NodeReportProto nodeReports = 1;</code>
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProtoOrBuilder> 
        getNodeReportsOrBuilderList();
    /**
     * <code>repeated .hadoop.yarn.NodeReportProto nodeReports = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProtoOrBuilder getNodeReportsOrBuilder(
        int index);
  }
  /**
   * Protobuf type {@code hadoop.yarn.GetClusterNodesResponseProto}
   */
  public static final class GetClusterNodesResponseProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.GetClusterNodesResponseProto)
      GetClusterNodesResponseProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use GetClusterNodesResponseProto.newBuilder() to construct.
    private GetClusterNodesResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private GetClusterNodesResponseProto() {
      nodeReports_ = java.util.Collections.emptyList();
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new GetClusterNodesResponseProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetClusterNodesResponseProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetClusterNodesResponseProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesResponseProto.Builder.class);
    }

    public static final int NODEREPORTS_FIELD_NUMBER = 1;
    @SuppressWarnings("serial")
    private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto> nodeReports_;
    /**
     * <code>repeated .hadoop.yarn.NodeReportProto nodeReports = 1;</code>
     */
    @java.lang.Override
    public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto> getNodeReportsList() {
      return nodeReports_;
    }
    /**
     * <code>repeated .hadoop.yarn.NodeReportProto nodeReports = 1;</code>
     */
    @java.lang.Override
    public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProtoOrBuilder> 
        getNodeReportsOrBuilderList() {
      return nodeReports_;
    }
    /**
     * <code>repeated .hadoop.yarn.NodeReportProto nodeReports = 1;</code>
     */
    @java.lang.Override
    public int getNodeReportsCount() {
      return nodeReports_.size();
    }
    /**
     * <code>repeated .hadoop.yarn.NodeReportProto nodeReports = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto getNodeReports(int index) {
      return nodeReports_.get(index);
    }
    /**
     * <code>repeated .hadoop.yarn.NodeReportProto nodeReports = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProtoOrBuilder getNodeReportsOrBuilder(
        int index) {
      return nodeReports_.get(index);
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      for (int i = 0; i < getNodeReportsCount(); i++) {
        if (!getNodeReports(i).isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      for (int i = 0; i < nodeReports_.size(); i++) {
        output.writeMessage(1, nodeReports_.get(i));
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      for (int i = 0; i < nodeReports_.size(); i++) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, nodeReports_.get(i));
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesResponseProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesResponseProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesResponseProto) obj;

      if (!getNodeReportsList()
          .equals(other.getNodeReportsList())) return false;
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (getNodeReportsCount() > 0) {
        hash = (37 * hash) + NODEREPORTS_FIELD_NUMBER;
        hash = (53 * hash) + getNodeReportsList().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesResponseProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesResponseProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesResponseProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesResponseProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesResponseProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesResponseProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesResponseProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesResponseProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesResponseProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.GetClusterNodesResponseProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.GetClusterNodesResponseProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesResponseProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetClusterNodesResponseProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetClusterNodesResponseProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesResponseProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesResponseProto.newBuilder()
      private Builder() {

      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);

      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        if (nodeReportsBuilder_ == null) {
          nodeReports_ = java.util.Collections.emptyList();
        } else {
          nodeReports_ = null;
          nodeReportsBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetClusterNodesResponseProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesResponseProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesResponseProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesResponseProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesResponseProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesResponseProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesResponseProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesResponseProto(this);
        buildPartialRepeatedFields(result);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartialRepeatedFields(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesResponseProto result) {
        if (nodeReportsBuilder_ == null) {
          if (((bitField0_ & 0x00000001) != 0)) {
            nodeReports_ = java.util.Collections.unmodifiableList(nodeReports_);
            bitField0_ = (bitField0_ & ~0x00000001);
          }
          result.nodeReports_ = nodeReports_;
        } else {
          result.nodeReports_ = nodeReportsBuilder_.build();
        }
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesResponseProto result) {
        // No singular fields to populate; the repeated nodeReports_ field
        // is copied in buildPartialRepeatedFields above.
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesResponseProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesResponseProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesResponseProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesResponseProto.getDefaultInstance()) return this;
        if (nodeReportsBuilder_ == null) {
          if (!other.nodeReports_.isEmpty()) {
            if (nodeReports_.isEmpty()) {
              nodeReports_ = other.nodeReports_;
              bitField0_ = (bitField0_ & ~0x00000001);
            } else {
              ensureNodeReportsIsMutable();
              nodeReports_.addAll(other.nodeReports_);
            }
            onChanged();
          }
        } else {
          if (!other.nodeReports_.isEmpty()) {
            if (nodeReportsBuilder_.isEmpty()) {
              nodeReportsBuilder_.dispose();
              nodeReportsBuilder_ = null;
              nodeReports_ = other.nodeReports_;
              bitField0_ = (bitField0_ & ~0x00000001);
              nodeReportsBuilder_ = 
                org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                   getNodeReportsFieldBuilder() : null;
            } else {
              nodeReportsBuilder_.addAllMessages(other.nodeReports_);
            }
          }
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        for (int i = 0; i < getNodeReportsCount(); i++) {
          if (!getNodeReports(i).isInitialized()) {
            return false;
          }
        }
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto m =
                    input.readMessage(
                        org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto.PARSER,
                        extensionRegistry);
                if (nodeReportsBuilder_ == null) {
                  ensureNodeReportsIsMutable();
                  nodeReports_.add(m);
                } else {
                  nodeReportsBuilder_.addMessage(m);
                }
                break;
              } // case 10
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

      private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto> nodeReports_ =
        java.util.Collections.emptyList();
      private void ensureNodeReportsIsMutable() {
        if (!((bitField0_ & 0x00000001) != 0)) {
          nodeReports_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto>(nodeReports_);
          bitField0_ |= 0x00000001;
        }
      }

      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProtoOrBuilder> nodeReportsBuilder_;

      /**
       * <code>repeated .hadoop.yarn.NodeReportProto nodeReports = 1;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto> getNodeReportsList() {
        if (nodeReportsBuilder_ == null) {
          return java.util.Collections.unmodifiableList(nodeReports_);
        } else {
          return nodeReportsBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.NodeReportProto nodeReports = 1;</code>
       */
      public int getNodeReportsCount() {
        if (nodeReportsBuilder_ == null) {
          return nodeReports_.size();
        } else {
          return nodeReportsBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.NodeReportProto nodeReports = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto getNodeReports(int index) {
        if (nodeReportsBuilder_ == null) {
          return nodeReports_.get(index);
        } else {
          return nodeReportsBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.NodeReportProto nodeReports = 1;</code>
       */
      public Builder setNodeReports(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto value) {
        if (nodeReportsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureNodeReportsIsMutable();
          nodeReports_.set(index, value);
          onChanged();
        } else {
          nodeReportsBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeReportProto nodeReports = 1;</code>
       */
      public Builder setNodeReports(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto.Builder builderForValue) {
        if (nodeReportsBuilder_ == null) {
          ensureNodeReportsIsMutable();
          nodeReports_.set(index, builderForValue.build());
          onChanged();
        } else {
          nodeReportsBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeReportProto nodeReports = 1;</code>
       */
      public Builder addNodeReports(org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto value) {
        if (nodeReportsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureNodeReportsIsMutable();
          nodeReports_.add(value);
          onChanged();
        } else {
          nodeReportsBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeReportProto nodeReports = 1;</code>
       */
      public Builder addNodeReports(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto value) {
        if (nodeReportsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureNodeReportsIsMutable();
          nodeReports_.add(index, value);
          onChanged();
        } else {
          nodeReportsBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeReportProto nodeReports = 1;</code>
       */
      public Builder addNodeReports(
          org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto.Builder builderForValue) {
        if (nodeReportsBuilder_ == null) {
          ensureNodeReportsIsMutable();
          nodeReports_.add(builderForValue.build());
          onChanged();
        } else {
          nodeReportsBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeReportProto nodeReports = 1;</code>
       */
      public Builder addNodeReports(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto.Builder builderForValue) {
        if (nodeReportsBuilder_ == null) {
          ensureNodeReportsIsMutable();
          nodeReports_.add(index, builderForValue.build());
          onChanged();
        } else {
          nodeReportsBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeReportProto nodeReports = 1;</code>
       */
      public Builder addAllNodeReports(
          java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto> values) {
        if (nodeReportsBuilder_ == null) {
          ensureNodeReportsIsMutable();
          org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
              values, nodeReports_);
          onChanged();
        } else {
          nodeReportsBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeReportProto nodeReports = 1;</code>
       */
      public Builder clearNodeReports() {
        if (nodeReportsBuilder_ == null) {
          nodeReports_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000001);
          onChanged();
        } else {
          nodeReportsBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeReportProto nodeReports = 1;</code>
       */
      public Builder removeNodeReports(int index) {
        if (nodeReportsBuilder_ == null) {
          ensureNodeReportsIsMutable();
          nodeReports_.remove(index);
          onChanged();
        } else {
          nodeReportsBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeReportProto nodeReports = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto.Builder getNodeReportsBuilder(
          int index) {
        return getNodeReportsFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .hadoop.yarn.NodeReportProto nodeReports = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProtoOrBuilder getNodeReportsOrBuilder(
          int index) {
        if (nodeReportsBuilder_ == null) {
          return nodeReports_.get(index);
        } else {
          return nodeReportsBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.NodeReportProto nodeReports = 1;</code>
       */
      public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProtoOrBuilder> 
           getNodeReportsOrBuilderList() {
        if (nodeReportsBuilder_ != null) {
          return nodeReportsBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(nodeReports_);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.NodeReportProto nodeReports = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto.Builder addNodeReportsBuilder() {
        return getNodeReportsFieldBuilder().addBuilder(
            org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.NodeReportProto nodeReports = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto.Builder addNodeReportsBuilder(
          int index) {
        return getNodeReportsFieldBuilder().addBuilder(
            index, org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.NodeReportProto nodeReports = 1;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto.Builder> 
           getNodeReportsBuilderList() {
        return getNodeReportsFieldBuilder().getBuilderList();
      }
      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProtoOrBuilder> 
          getNodeReportsFieldBuilder() {
        if (nodeReportsBuilder_ == null) {
          nodeReportsBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProtoOrBuilder>(
                  nodeReports_,
                  ((bitField0_ & 0x00000001) != 0),
                  getParentForChildren(),
                  isClean());
          nodeReports_ = null;
        }
        return nodeReportsBuilder_;
      }
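      // Until getNodeReportsFieldBuilder() is first invoked, this Builder
      // keeps the repeated field as a plain java.util.List in nodeReports_.
      // That first invocation hands the list to a RepeatedFieldBuilderV3 and
      // nulls the field, after which every nodeReports accessor above routes
      // through nodeReportsBuilder_; this is why each accessor branches on
      // whether nodeReportsBuilder_ is null.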
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.GetClusterNodesResponseProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.GetClusterNodesResponseProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesResponseProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesResponseProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesResponseProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<GetClusterNodesResponseProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<GetClusterNodesResponseProto>() {
      @java.lang.Override
      public GetClusterNodesResponseProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<GetClusterNodesResponseProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<GetClusterNodesResponseProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesResponseProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
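
  /*
   * Usage sketch for the repeated nodeReports builder surface above
   * (illustrative only; the "reports" Iterable is an assumption):
   *
   *   GetClusterNodesResponseProto.Builder b =
   *       GetClusterNodesResponseProto.newBuilder();
   *   b.addNodeReportsBuilder();       // append one default-valued report
   *   b.addAllNodeReports(reports);    // bulk-append an Iterable of reports
   *   b.removeNodeReports(0);          // drop the default-valued one again
   *   GetClusterNodesResponseProto resp = b.build();
   */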

  public interface GetQueueInfoRequestProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.GetQueueInfoRequestProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>optional string queueName = 1;</code>
     * @return Whether the queueName field is set.
     */
    boolean hasQueueName();
    /**
     * <code>optional string queueName = 1;</code>
     * @return The queueName.
     */
    java.lang.String getQueueName();
    /**
     * <code>optional string queueName = 1;</code>
     * @return The bytes for queueName.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getQueueNameBytes();

    /**
     * <code>optional bool includeApplications = 2;</code>
     * @return Whether the includeApplications field is set.
     */
    boolean hasIncludeApplications();
    /**
     * <code>optional bool includeApplications = 2;</code>
     * @return The includeApplications.
     */
    boolean getIncludeApplications();

    /**
     * <code>optional bool includeChildQueues = 3;</code>
     * @return Whether the includeChildQueues field is set.
     */
    boolean hasIncludeChildQueues();
    /**
     * <code>optional bool includeChildQueues = 3;</code>
     * @return The includeChildQueues.
     */
    boolean getIncludeChildQueues();

    /**
     * <code>optional bool recursive = 4;</code>
     * @return Whether the recursive field is set.
     */
    boolean hasRecursive();
    /**
     * <code>optional bool recursive = 4;</code>
     * @return The recursive.
     */
    boolean getRecursive();

    /**
     * <code>optional string subClusterId = 5;</code>
     * @return Whether the subClusterId field is set.
     */
    boolean hasSubClusterId();
    /**
     * <code>optional string subClusterId = 5;</code>
     * @return The subClusterId.
     */
    java.lang.String getSubClusterId();
    /**
     * <code>optional string subClusterId = 5;</code>
     * @return The bytes for subClusterId.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getSubClusterIdBytes();
  }
  /**
   * Protobuf type {@code hadoop.yarn.GetQueueInfoRequestProto}
   */
  public static final class GetQueueInfoRequestProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.GetQueueInfoRequestProto)
      GetQueueInfoRequestProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use GetQueueInfoRequestProto.newBuilder() to construct.
    private GetQueueInfoRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private GetQueueInfoRequestProto() {
      queueName_ = "";
      subClusterId_ = "";
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new GetQueueInfoRequestProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetQueueInfoRequestProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetQueueInfoRequestProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoRequestProto.Builder.class);
    }

    private int bitField0_;
    public static final int QUEUENAME_FIELD_NUMBER = 1;
    @SuppressWarnings("serial")
    private volatile java.lang.Object queueName_ = "";
    /**
     * <code>optional string queueName = 1;</code>
     * @return Whether the queueName field is set.
     */
    @java.lang.Override
    public boolean hasQueueName() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional string queueName = 1;</code>
     * @return The queueName.
     */
    @java.lang.Override
    public java.lang.String getQueueName() {
      java.lang.Object ref = queueName_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          queueName_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string queueName = 1;</code>
     * @return The bytes for queueName.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getQueueNameBytes() {
      java.lang.Object ref = queueName_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        queueName_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }
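    // queueName_ holds either a java.lang.String or a ByteString. After
    // parsing it starts out as bytes; the first getQueueName() call decodes
    // the UTF-8 and, if the bytes are valid UTF-8, caches the decoded String
    // back into the field. getQueueNameBytes() converts and caches in the
    // opposite direction, so each conversion happens at most once per
    // instance. subClusterId_ below follows the same pattern.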

    public static final int INCLUDEAPPLICATIONS_FIELD_NUMBER = 2;
    private boolean includeApplications_ = false;
    /**
     * <code>optional bool includeApplications = 2;</code>
     * @return Whether the includeApplications field is set.
     */
    @java.lang.Override
    public boolean hasIncludeApplications() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <code>optional bool includeApplications = 2;</code>
     * @return The includeApplications.
     */
    @java.lang.Override
    public boolean getIncludeApplications() {
      return includeApplications_;
    }

    public static final int INCLUDECHILDQUEUES_FIELD_NUMBER = 3;
    private boolean includeChildQueues_ = false;
    /**
     * <code>optional bool includeChildQueues = 3;</code>
     * @return Whether the includeChildQueues field is set.
     */
    @java.lang.Override
    public boolean hasIncludeChildQueues() {
      return ((bitField0_ & 0x00000004) != 0);
    }
    /**
     * <code>optional bool includeChildQueues = 3;</code>
     * @return The includeChildQueues.
     */
    @java.lang.Override
    public boolean getIncludeChildQueues() {
      return includeChildQueues_;
    }

    public static final int RECURSIVE_FIELD_NUMBER = 4;
    private boolean recursive_ = false;
    /**
     * <code>optional bool recursive = 4;</code>
     * @return Whether the recursive field is set.
     */
    @java.lang.Override
    public boolean hasRecursive() {
      return ((bitField0_ & 0x00000008) != 0);
    }
    /**
     * <code>optional bool recursive = 4;</code>
     * @return The recursive.
     */
    @java.lang.Override
    public boolean getRecursive() {
      return recursive_;
    }

    public static final int SUBCLUSTERID_FIELD_NUMBER = 5;
    @SuppressWarnings("serial")
    private volatile java.lang.Object subClusterId_ = "";
    /**
     * <code>optional string subClusterId = 5;</code>
     * @return Whether the subClusterId field is set.
     */
    @java.lang.Override
    public boolean hasSubClusterId() {
      return ((bitField0_ & 0x00000010) != 0);
    }
    /**
     * <code>optional string subClusterId = 5;</code>
     * @return The subClusterId.
     */
    @java.lang.Override
    public java.lang.String getSubClusterId() {
      java.lang.Object ref = subClusterId_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          subClusterId_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string subClusterId = 5;</code>
     * @return The bytes for subClusterId.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getSubClusterIdBytes() {
      java.lang.Object ref = subClusterId_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        subClusterId_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, queueName_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        output.writeBool(2, includeApplications_);
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        output.writeBool(3, includeChildQueues_);
      }
      if (((bitField0_ & 0x00000008) != 0)) {
        output.writeBool(4, recursive_);
      }
      if (((bitField0_ & 0x00000010) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 5, subClusterId_);
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, queueName_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeBoolSize(2, includeApplications_);
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeBoolSize(3, includeChildQueues_);
      }
      if (((bitField0_ & 0x00000008) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeBoolSize(4, recursive_);
      }
      if (((bitField0_ & 0x00000010) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(5, subClusterId_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }
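    // Both getSerializedSize() and hashCode() are memoized (memoizedSize and
    // memoizedHashCode come from the protobuf message superclass); this is
    // safe because a built message is immutable.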

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoRequestProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoRequestProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoRequestProto) obj;

      if (hasQueueName() != other.hasQueueName()) return false;
      if (hasQueueName()) {
        if (!getQueueName()
            .equals(other.getQueueName())) return false;
      }
      if (hasIncludeApplications() != other.hasIncludeApplications()) return false;
      if (hasIncludeApplications()) {
        if (getIncludeApplications()
            != other.getIncludeApplications()) return false;
      }
      if (hasIncludeChildQueues() != other.hasIncludeChildQueues()) return false;
      if (hasIncludeChildQueues()) {
        if (getIncludeChildQueues()
            != other.getIncludeChildQueues()) return false;
      }
      if (hasRecursive() != other.hasRecursive()) return false;
      if (hasRecursive()) {
        if (getRecursive()
            != other.getRecursive()) return false;
      }
      if (hasSubClusterId() != other.hasSubClusterId()) return false;
      if (hasSubClusterId()) {
        if (!getSubClusterId()
            .equals(other.getSubClusterId())) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasQueueName()) {
        hash = (37 * hash) + QUEUENAME_FIELD_NUMBER;
        hash = (53 * hash) + getQueueName().hashCode();
      }
      if (hasIncludeApplications()) {
        hash = (37 * hash) + INCLUDEAPPLICATIONS_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashBoolean(
            getIncludeApplications());
      }
      if (hasIncludeChildQueues()) {
        hash = (37 * hash) + INCLUDECHILDQUEUES_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashBoolean(
            getIncludeChildQueues());
      }
      if (hasRecursive()) {
        hash = (37 * hash) + RECURSIVE_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashBoolean(
            getRecursive());
      }
      if (hasSubClusterId()) {
        hash = (37 * hash) + SUBCLUSTERID_FIELD_NUMBER;
        hash = (53 * hash) + getSubClusterId().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoRequestProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoRequestProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoRequestProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoRequestProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoRequestProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoRequestProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoRequestProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoRequestProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoRequestProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.GetQueueInfoRequestProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.GetQueueInfoRequestProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoRequestProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetQueueInfoRequestProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetQueueInfoRequestProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoRequestProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoRequestProto.newBuilder()
      private Builder() {
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        queueName_ = "";
        includeApplications_ = false;
        includeChildQueues_ = false;
        recursive_ = false;
        subClusterId_ = "";
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetQueueInfoRequestProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoRequestProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoRequestProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoRequestProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoRequestProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoRequestProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoRequestProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoRequestProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoRequestProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.queueName_ = queueName_;
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.includeApplications_ = includeApplications_;
          to_bitField0_ |= 0x00000002;
        }
        if (((from_bitField0_ & 0x00000004) != 0)) {
          result.includeChildQueues_ = includeChildQueues_;
          to_bitField0_ |= 0x00000004;
        }
        if (((from_bitField0_ & 0x00000008) != 0)) {
          result.recursive_ = recursive_;
          to_bitField0_ |= 0x00000008;
        }
        if (((from_bitField0_ & 0x00000010) != 0)) {
          result.subClusterId_ = subClusterId_;
          to_bitField0_ |= 0x00000010;
        }
        result.bitField0_ |= to_bitField0_;
      }
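      // Presence-bit layout shared by this Builder and the built message:
      //   0x01 queueName, 0x02 includeApplications, 0x04 includeChildQueues,
      //   0x08 recursive, 0x10 subClusterId.
      // buildPartial0 copies only the fields whose bit is set, so fields left
      // unset keep their defaults in the resulting message.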

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoRequestProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoRequestProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoRequestProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoRequestProto.getDefaultInstance()) return this;
        if (other.hasQueueName()) {
          queueName_ = other.queueName_;
          bitField0_ |= 0x00000001;
          onChanged();
        }
        if (other.hasIncludeApplications()) {
          setIncludeApplications(other.getIncludeApplications());
        }
        if (other.hasIncludeChildQueues()) {
          setIncludeChildQueues(other.getIncludeChildQueues());
        }
        if (other.hasRecursive()) {
          setRecursive(other.getRecursive());
        }
        if (other.hasSubClusterId()) {
          subClusterId_ = other.subClusterId_;
          bitField0_ |= 0x00000010;
          onChanged();
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                queueName_ = input.readBytes();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
              case 16: {
                includeApplications_ = input.readBool();
                bitField0_ |= 0x00000002;
                break;
              } // case 16
              case 24: {
                includeChildQueues_ = input.readBool();
                bitField0_ |= 0x00000004;
                break;
              } // case 24
              case 32: {
                recursive_ = input.readBool();
                bitField0_ |= 0x00000008;
                break;
              } // case 32
              case 42: {
                subClusterId_ = input.readBytes();
                bitField0_ |= 0x00000010;
                break;
              } // case 42
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
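      // The case labels in mergeFrom above are protobuf wire tags, computed
      // as (field_number << 3) | wire_type: 10 is field 1 with wire type 2
      // (length-delimited), 16/24/32 are fields 2/3/4 with wire type 0
      // (varint-encoded bool), and 42 is field 5 with wire type 2.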
      private int bitField0_;

      private java.lang.Object queueName_ = "";
      /**
       * <code>optional string queueName = 1;</code>
       * @return Whether the queueName field is set.
       */
      public boolean hasQueueName() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>optional string queueName = 1;</code>
       * @return The queueName.
       */
      public java.lang.String getQueueName() {
        java.lang.Object ref = queueName_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            queueName_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string queueName = 1;</code>
       * @return The bytes for queueName.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getQueueNameBytes() {
        java.lang.Object ref = queueName_;
        if (ref instanceof java.lang.String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          queueName_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string queueName = 1;</code>
       * @param value The queueName to set.
       * @return This builder for chaining.
       */
      public Builder setQueueName(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        queueName_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional string queueName = 1;</code>
       * @return This builder for chaining.
       */
      public Builder clearQueueName() {
        queueName_ = getDefaultInstance().getQueueName();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
        return this;
      }
      /**
       * <code>optional string queueName = 1;</code>
       * @param value The bytes for queueName to set.
       * @return This builder for chaining.
       */
      public Builder setQueueNameBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        queueName_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }

      private boolean includeApplications_ ;
      /**
       * <code>optional bool includeApplications = 2;</code>
       * @return Whether the includeApplications field is set.
       */
      @java.lang.Override
      public boolean hasIncludeApplications() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * <code>optional bool includeApplications = 2;</code>
       * @return The includeApplications.
       */
      @java.lang.Override
      public boolean getIncludeApplications() {
        return includeApplications_;
      }
      /**
       * <code>optional bool includeApplications = 2;</code>
       * @param value The includeApplications to set.
       * @return This builder for chaining.
       */
      public Builder setIncludeApplications(boolean value) {
        includeApplications_ = value;
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>optional bool includeApplications = 2;</code>
       * @return This builder for chaining.
       */
      public Builder clearIncludeApplications() {
        bitField0_ = (bitField0_ & ~0x00000002);
        includeApplications_ = false;
        onChanged();
        return this;
      }

      private boolean includeChildQueues_ ;
      /**
       * <code>optional bool includeChildQueues = 3;</code>
       * @return Whether the includeChildQueues field is set.
       */
      @java.lang.Override
      public boolean hasIncludeChildQueues() {
        return ((bitField0_ & 0x00000004) != 0);
      }
      /**
       * <code>optional bool includeChildQueues = 3;</code>
       * @return The includeChildQueues.
       */
      @java.lang.Override
      public boolean getIncludeChildQueues() {
        return includeChildQueues_;
      }
      /**
       * <code>optional bool includeChildQueues = 3;</code>
       * @param value The includeChildQueues to set.
       * @return This builder for chaining.
       */
      public Builder setIncludeChildQueues(boolean value) {
        includeChildQueues_ = value;
        bitField0_ |= 0x00000004;
        onChanged();
        return this;
      }
      /**
       * <code>optional bool includeChildQueues = 3;</code>
       * @return This builder for chaining.
       */
      public Builder clearIncludeChildQueues() {
        bitField0_ = (bitField0_ & ~0x00000004);
        includeChildQueues_ = false;
        onChanged();
        return this;
      }

      private boolean recursive_ ;
      /**
       * <code>optional bool recursive = 4;</code>
       * @return Whether the recursive field is set.
       */
      @java.lang.Override
      public boolean hasRecursive() {
        return ((bitField0_ & 0x00000008) != 0);
      }
      /**
       * <code>optional bool recursive = 4;</code>
       * @return The recursive.
       */
      @java.lang.Override
      public boolean getRecursive() {
        return recursive_;
      }
      /**
       * <code>optional bool recursive = 4;</code>
       * @param value The recursive to set.
       * @return This builder for chaining.
       */
      public Builder setRecursive(boolean value) {
        recursive_ = value;
        bitField0_ |= 0x00000008;
        onChanged();
        return this;
      }
      /**
       * <code>optional bool recursive = 4;</code>
       * @return This builder for chaining.
       */
      public Builder clearRecursive() {
        bitField0_ = (bitField0_ & ~0x00000008);
        recursive_ = false;
        onChanged();
        return this;
      }

      private java.lang.Object subClusterId_ = "";
      /**
       * <code>optional string subClusterId = 5;</code>
       * @return Whether the subClusterId field is set.
       */
      public boolean hasSubClusterId() {
        return ((bitField0_ & 0x00000010) != 0);
      }
      /**
       * <code>optional string subClusterId = 5;</code>
       * @return The subClusterId.
       */
      public java.lang.String getSubClusterId() {
        java.lang.Object ref = subClusterId_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            subClusterId_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string subClusterId = 5;</code>
       * @return The bytes for subClusterId.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getSubClusterIdBytes() {
        java.lang.Object ref = subClusterId_;
        if (ref instanceof java.lang.String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          subClusterId_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string subClusterId = 5;</code>
       * @param value The subClusterId to set.
       * @return This builder for chaining.
       */
      public Builder setSubClusterId(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        subClusterId_ = value;
        bitField0_ |= 0x00000010;
        onChanged();
        return this;
      }
      /**
       * <code>optional string subClusterId = 5;</code>
       * @return This builder for chaining.
       */
      public Builder clearSubClusterId() {
        subClusterId_ = getDefaultInstance().getSubClusterId();
        bitField0_ = (bitField0_ & ~0x00000010);
        onChanged();
        return this;
      }
      /**
       * <code>optional string subClusterId = 5;</code>
       * @param value The bytes for subClusterId to set.
       * @return This builder for chaining.
       */
      public Builder setSubClusterIdBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        subClusterId_ = value;
        bitField0_ |= 0x00000010;
        onChanged();
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.GetQueueInfoRequestProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.GetQueueInfoRequestProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoRequestProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoRequestProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoRequestProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<GetQueueInfoRequestProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<GetQueueInfoRequestProto>() {
      @java.lang.Override
      public GetQueueInfoRequestProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<GetQueueInfoRequestProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<GetQueueInfoRequestProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoRequestProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
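
  /*
   * Usage sketch (illustrative only; the queue name is an assumption): a
   * round trip through the setters and parse methods defined above.
   *
   *   GetQueueInfoRequestProto req = GetQueueInfoRequestProto.newBuilder()
   *       .setQueueName("default")       // field 1
   *       .setIncludeApplications(true)  // field 2
   *       .setRecursive(true)            // field 4
   *       .build();
   *   byte[] wire = req.toByteArray();
   *   GetQueueInfoRequestProto parsed =
   *       GetQueueInfoRequestProto.parseFrom(wire);
   */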

  public interface GetQueueInfoResponseProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.GetQueueInfoResponseProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>optional .hadoop.yarn.QueueInfoProto queueInfo = 1;</code>
     * @return Whether the queueInfo field is set.
     */
    boolean hasQueueInfo();
    /**
     * <code>optional .hadoop.yarn.QueueInfoProto queueInfo = 1;</code>
     * @return The queueInfo.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto getQueueInfo();
    /**
     * <code>optional .hadoop.yarn.QueueInfoProto queueInfo = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProtoOrBuilder getQueueInfoOrBuilder();
  }
  /**
   * Protobuf type {@code hadoop.yarn.GetQueueInfoResponseProto}
   */
  public static final class GetQueueInfoResponseProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.GetQueueInfoResponseProto)
      GetQueueInfoResponseProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use GetQueueInfoResponseProto.newBuilder() to construct.
    private GetQueueInfoResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private GetQueueInfoResponseProto() {
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new GetQueueInfoResponseProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetQueueInfoResponseProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetQueueInfoResponseProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoResponseProto.Builder.class);
    }

    private int bitField0_;
    public static final int QUEUEINFO_FIELD_NUMBER = 1;
    private org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto queueInfo_;
    /**
     * <code>optional .hadoop.yarn.QueueInfoProto queueInfo = 1;</code>
     * @return Whether the queueInfo field is set.
     */
    @java.lang.Override
    public boolean hasQueueInfo() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.QueueInfoProto queueInfo = 1;</code>
     * @return The queueInfo.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto getQueueInfo() {
      return queueInfo_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto.getDefaultInstance() : queueInfo_;
    }
    /**
     * <code>optional .hadoop.yarn.QueueInfoProto queueInfo = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProtoOrBuilder getQueueInfoOrBuilder() {
      return queueInfo_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto.getDefaultInstance() : queueInfo_;
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      if (hasQueueInfo()) {
        if (!getQueueInfo().isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }
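    // protoc emits this recursive check only when the nested type can itself
    // be uninitialized, i.e. QueueInfoProto transitively declares required
    // fields. Contrast GetQueueInfoRequestProto above, whose fields are all
    // optional scalars, so its isInitialized() returns true unconditionally.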

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeMessage(1, getQueueInfo());
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, getQueueInfo());
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoResponseProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoResponseProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoResponseProto) obj;

      if (hasQueueInfo() != other.hasQueueInfo()) return false;
      if (hasQueueInfo()) {
        if (!getQueueInfo()
            .equals(other.getQueueInfo())) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasQueueInfo()) {
        hash = (37 * hash) + QUEUEINFO_FIELD_NUMBER;
        hash = (53 * hash) + getQueueInfo().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoResponseProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoResponseProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoResponseProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoResponseProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoResponseProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoResponseProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoResponseProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoResponseProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoResponseProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.GetQueueInfoResponseProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.GetQueueInfoResponseProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoResponseProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetQueueInfoResponseProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetQueueInfoResponseProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoResponseProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoResponseProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
          getQueueInfoFieldBuilder();
        }
      }
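      // alwaysUseFieldBuilders is a protobuf-internal flag used by tests; in
      // normal operation it is false, so queueInfoBuilder_ stays null until
      // the queueInfo field is first accessed through this Builder.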
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        queueInfo_ = null;
        if (queueInfoBuilder_ != null) {
          queueInfoBuilder_.dispose();
          queueInfoBuilder_ = null;
        }
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetQueueInfoResponseProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoResponseProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoResponseProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoResponseProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoResponseProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoResponseProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoResponseProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoResponseProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

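      // Copies each set singular field into the message and mirrors its presence
      // bit: from_bitField0_ holds the builder-side bits, to_bitField0_ accumulates
      // the bits to install on the built message.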
      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoResponseProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.queueInfo_ = queueInfoBuilder_ == null
              ? queueInfo_
              : queueInfoBuilder_.build();
          to_bitField0_ |= 0x00000001;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoResponseProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoResponseProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoResponseProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoResponseProto.getDefaultInstance()) return this;
        if (other.hasQueueInfo()) {
          mergeQueueInfo(other.getQueueInfo());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        if (hasQueueInfo()) {
          if (!getQueueInfo().isInitialized()) {
            return false;
          }
        }
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
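            // readTag() returns 0 at end of input; a nonzero tag encodes
            // (field number << 3) | wire type.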
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
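                // tag 10 == (field 1 << 3) | wire type 2 (length-delimited):
                // parse the queueInfo submessage directly into its field builder.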
                input.readMessage(
                    getQueueInfoFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
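      // Presence bits for the optional fields; bit 0x00000001 tracks queueInfo.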
      private int bitField0_;

      private org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto queueInfo_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto, org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProtoOrBuilder> queueInfoBuilder_;
      /**
       * <code>optional .hadoop.yarn.QueueInfoProto queueInfo = 1;</code>
       * @return Whether the queueInfo field is set.
       */
      public boolean hasQueueInfo() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.QueueInfoProto queueInfo = 1;</code>
       * @return The queueInfo.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto getQueueInfo() {
        if (queueInfoBuilder_ == null) {
          return queueInfo_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto.getDefaultInstance() : queueInfo_;
        } else {
          return queueInfoBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.QueueInfoProto queueInfo = 1;</code>
       */
      public Builder setQueueInfo(org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto value) {
        if (queueInfoBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          queueInfo_ = value;
        } else {
          queueInfoBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.QueueInfoProto queueInfo = 1;</code>
       */
      public Builder setQueueInfo(
          org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto.Builder builderForValue) {
        if (queueInfoBuilder_ == null) {
          queueInfo_ = builderForValue.build();
        } else {
          queueInfoBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.QueueInfoProto queueInfo = 1;</code>
       */
      public Builder mergeQueueInfo(org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto value) {
        if (queueInfoBuilder_ == null) {
          if (((bitField0_ & 0x00000001) != 0) &&
            queueInfo_ != null &&
            queueInfo_ != org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto.getDefaultInstance()) {
            getQueueInfoBuilder().mergeFrom(value);
          } else {
            queueInfo_ = value;
          }
        } else {
          queueInfoBuilder_.mergeFrom(value);
        }
        if (queueInfo_ != null) {
          bitField0_ |= 0x00000001;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.QueueInfoProto queueInfo = 1;</code>
       */
      public Builder clearQueueInfo() {
        bitField0_ = (bitField0_ & ~0x00000001);
        queueInfo_ = null;
        if (queueInfoBuilder_ != null) {
          queueInfoBuilder_.dispose();
          queueInfoBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.QueueInfoProto queueInfo = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto.Builder getQueueInfoBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getQueueInfoFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.QueueInfoProto queueInfo = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProtoOrBuilder getQueueInfoOrBuilder() {
        if (queueInfoBuilder_ != null) {
          return queueInfoBuilder_.getMessageOrBuilder();
        } else {
          return queueInfo_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto.getDefaultInstance() : queueInfo_;
        }
      }
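      // The SingleFieldBuilderV3 below is created lazily on first access; once it
      // exists it owns the field value, so queueInfo_ is nulled and all reads and
      // writes go through the builder, which reports changes to the parent.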
      /**
       * <code>optional .hadoop.yarn.QueueInfoProto queueInfo = 1;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto, org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProtoOrBuilder> 
          getQueueInfoFieldBuilder() {
        if (queueInfoBuilder_ == null) {
          queueInfoBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto, org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProtoOrBuilder>(
                  getQueueInfo(),
                  getParentForChildren(),
                  isClean());
          queueInfo_ = null;
        }
        return queueInfoBuilder_;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.GetQueueInfoResponseProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.GetQueueInfoResponseProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoResponseProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoResponseProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoResponseProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

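    // The public PARSER field is deprecated in favor of the parser() accessor.
    // On a parse failure, the partially built message is attached to the thrown
    // InvalidProtocolBufferException as its "unfinished message".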
    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<GetQueueInfoResponseProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<GetQueueInfoResponseProto>() {
      @java.lang.Override
      public GetQueueInfoResponseProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<GetQueueInfoResponseProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<GetQueueInfoResponseProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoResponseProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
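
  // Usage sketch (illustrative only; "in" is a hypothetical java.io.InputStream):
  //
  //   YarnServiceProtos.GetQueueInfoResponseProto resp =
  //       YarnServiceProtos.GetQueueInfoResponseProto.parseDelimitedFrom(in);
  //   if (resp.hasQueueInfo()) {
  //     org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto queue = resp.getQueueInfo();
  //   }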

  public interface GetQueueUserAclsInfoRequestProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.GetQueueUserAclsInfoRequestProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
  }
  /**
   * Protobuf type {@code hadoop.yarn.GetQueueUserAclsInfoRequestProto}
   */
  public static final class GetQueueUserAclsInfoRequestProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.GetQueueUserAclsInfoRequestProto)
      GetQueueUserAclsInfoRequestProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use GetQueueUserAclsInfoRequestProto.newBuilder() to construct.
    private GetQueueUserAclsInfoRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private GetQueueUserAclsInfoRequestProto() {
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new GetQueueUserAclsInfoRequestProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetQueueUserAclsInfoRequestProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetQueueUserAclsInfoRequestProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoRequestProto.Builder.class);
    }

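    // Memoized initialization check: -1 = not yet computed, 0 = false, 1 = true.
    // This message declares no required fields, so it is always initialized.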
    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoRequestProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoRequestProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoRequestProto) obj;

      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

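    // hashCode mixes the descriptor hash with each set field and the unknown
    // fields, then memoizes the result (0 doubles as the "not computed" sentinel).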
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoRequestProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoRequestProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoRequestProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoRequestProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoRequestProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoRequestProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoRequestProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoRequestProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoRequestProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.GetQueueUserAclsInfoRequestProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.GetQueueUserAclsInfoRequestProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoRequestProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetQueueUserAclsInfoRequestProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetQueueUserAclsInfoRequestProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoRequestProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoRequestProto.newBuilder()
      private Builder() {

      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);

      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetQueueUserAclsInfoRequestProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoRequestProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoRequestProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoRequestProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoRequestProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoRequestProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoRequestProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoRequestProto(this);
        onBuilt();
        return result;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoRequestProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoRequestProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoRequestProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoRequestProto.getDefaultInstance()) return this;
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              default: {
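                // This message declares no fields, so every nonzero tag is kept
                // as an unknown field to preserve round-trip fidelity.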
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.GetQueueUserAclsInfoRequestProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.GetQueueUserAclsInfoRequestProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoRequestProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoRequestProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoRequestProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<GetQueueUserAclsInfoRequestProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<GetQueueUserAclsInfoRequestProto>() {
      @java.lang.Override
      public GetQueueUserAclsInfoRequestProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<GetQueueUserAclsInfoRequestProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<GetQueueUserAclsInfoRequestProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoRequestProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
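
  // Usage sketch (illustrative only): the request message carries no fields, so a
  // fresh instance is simply
  //
  //   YarnServiceProtos.GetQueueUserAclsInfoRequestProto req =
  //       YarnServiceProtos.GetQueueUserAclsInfoRequestProto.newBuilder().build();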

  public interface GetQueueUserAclsInfoResponseProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.GetQueueUserAclsInfoResponseProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>repeated .hadoop.yarn.QueueUserACLInfoProto queueUserAcls = 1;</code>
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto> 
        getQueueUserAclsList();
    /**
     * <code>repeated .hadoop.yarn.QueueUserACLInfoProto queueUserAcls = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto getQueueUserAcls(int index);
    /**
     * <code>repeated .hadoop.yarn.QueueUserACLInfoProto queueUserAcls = 1;</code>
     */
    int getQueueUserAclsCount();
    /**
     * <code>repeated .hadoop.yarn.QueueUserACLInfoProto queueUserAcls = 1;</code>
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProtoOrBuilder> 
        getQueueUserAclsOrBuilderList();
    /**
     * <code>repeated .hadoop.yarn.QueueUserACLInfoProto queueUserAcls = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProtoOrBuilder getQueueUserAclsOrBuilder(
        int index);
  }
  /**
   * Protobuf type {@code hadoop.yarn.GetQueueUserAclsInfoResponseProto}
   */
  public static final class GetQueueUserAclsInfoResponseProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.GetQueueUserAclsInfoResponseProto)
      GetQueueUserAclsInfoResponseProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use GetQueueUserAclsInfoResponseProto.newBuilder() to construct.
    private GetQueueUserAclsInfoResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private GetQueueUserAclsInfoResponseProto() {
      queueUserAcls_ = java.util.Collections.emptyList();
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new GetQueueUserAclsInfoResponseProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetQueueUserAclsInfoResponseProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetQueueUserAclsInfoResponseProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoResponseProto.Builder.class);
    }

    public static final int QUEUEUSERACLS_FIELD_NUMBER = 1;
    @SuppressWarnings("serial")
    private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto> queueUserAcls_;
    /**
     * <code>repeated .hadoop.yarn.QueueUserACLInfoProto queueUserAcls = 1;</code>
     */
    @java.lang.Override
    public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto> getQueueUserAclsList() {
      return queueUserAcls_;
    }
    /**
     * <code>repeated .hadoop.yarn.QueueUserACLInfoProto queueUserAcls = 1;</code>
     */
    @java.lang.Override
    public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProtoOrBuilder> 
        getQueueUserAclsOrBuilderList() {
      return queueUserAcls_;
    }
    /**
     * <code>repeated .hadoop.yarn.QueueUserACLInfoProto queueUserAcls = 1;</code>
     */
    @java.lang.Override
    public int getQueueUserAclsCount() {
      return queueUserAcls_.size();
    }
    /**
     * <code>repeated .hadoop.yarn.QueueUserACLInfoProto queueUserAcls = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto getQueueUserAcls(int index) {
      return queueUserAcls_.get(index);
    }
    /**
     * <code>repeated .hadoop.yarn.QueueUserACLInfoProto queueUserAcls = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProtoOrBuilder getQueueUserAclsOrBuilder(
        int index) {
      return queueUserAcls_.get(index);
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      for (int i = 0; i < queueUserAcls_.size(); i++) {
        output.writeMessage(1, queueUserAcls_.get(i));
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      for (int i = 0; i < queueUserAcls_.size(); i++) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, queueUserAcls_.get(i));
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoResponseProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoResponseProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoResponseProto) obj;

      if (!getQueueUserAclsList()
          .equals(other.getQueueUserAclsList())) return false;
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (getQueueUserAclsCount() > 0) {
        hash = (37 * hash) + QUEUEUSERACLS_FIELD_NUMBER;
        hash = (53 * hash) + getQueueUserAclsList().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoResponseProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoResponseProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoResponseProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoResponseProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoResponseProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoResponseProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoResponseProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoResponseProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoResponseProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.GetQueueUserAclsInfoResponseProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.GetQueueUserAclsInfoResponseProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoResponseProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetQueueUserAclsInfoResponseProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetQueueUserAclsInfoResponseProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoResponseProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoResponseProto.newBuilder()
      private Builder() {

      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);

      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        if (queueUserAclsBuilder_ == null) {
          queueUserAcls_ = java.util.Collections.emptyList();
        } else {
          queueUserAcls_ = null;
          queueUserAclsBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetQueueUserAclsInfoResponseProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoResponseProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoResponseProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoResponseProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoResponseProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoResponseProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoResponseProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoResponseProto(this);
        buildPartialRepeatedFields(result);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

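      // Freezes the repeated field for the built message: the list is wrapped in
      // Collections.unmodifiableList and the ownership bit is cleared, so builder
      // and message can share one list until the builder next mutates it.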
      private void buildPartialRepeatedFields(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoResponseProto result) {
        if (queueUserAclsBuilder_ == null) {
          if (((bitField0_ & 0x00000001) != 0)) {
            queueUserAcls_ = java.util.Collections.unmodifiableList(queueUserAcls_);
            bitField0_ = (bitField0_ & ~0x00000001);
          }
          result.queueUserAcls_ = queueUserAcls_;
        } else {
          result.queueUserAcls_ = queueUserAclsBuilder_.build();
        }
      }

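      // No singular fields exist on this message, so there is nothing to copy;
      // the unused local below is uniform generator scaffolding.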
      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoResponseProto result) {
        int from_bitField0_ = bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoResponseProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoResponseProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

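      // Merging another message's repeated field: if this builder's list is empty,
      // the other (immutable) list is adopted directly with the ownership bit
      // cleared, deferring any copy until a mutation actually happens.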
      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoResponseProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoResponseProto.getDefaultInstance()) return this;
        if (queueUserAclsBuilder_ == null) {
          if (!other.queueUserAcls_.isEmpty()) {
            if (queueUserAcls_.isEmpty()) {
              queueUserAcls_ = other.queueUserAcls_;
              bitField0_ = (bitField0_ & ~0x00000001);
            } else {
              ensureQueueUserAclsIsMutable();
              queueUserAcls_.addAll(other.queueUserAcls_);
            }
            onChanged();
          }
        } else {
          if (!other.queueUserAcls_.isEmpty()) {
            if (queueUserAclsBuilder_.isEmpty()) {
              queueUserAclsBuilder_.dispose();
              queueUserAclsBuilder_ = null;
              queueUserAcls_ = other.queueUserAcls_;
              bitField0_ = (bitField0_ & ~0x00000001);
              queueUserAclsBuilder_ = 
                org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                   getQueueUserAclsFieldBuilder() : null;
            } else {
              queueUserAclsBuilder_.addAllMessages(other.queueUserAcls_);
            }
          }
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
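                // Each length-delimited record on field 1 is parsed as one
                // QueueUserACLInfoProto and appended to the repeated field.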
                org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto m =
                    input.readMessage(
                        org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto.PARSER,
                        extensionRegistry);
                if (queueUserAclsBuilder_ == null) {
                  ensureQueueUserAclsIsMutable();
                  queueUserAcls_.add(m);
                } else {
                  queueUserAclsBuilder_.addMessage(m);
                }
                break;
              } // case 10
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

      private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto> queueUserAcls_ =
        java.util.Collections.emptyList();
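      // Copy-on-write guard: bit 0x00000001 here means the builder privately owns
      // a mutable ArrayList; when clear, the list may be shared with a built
      // message and must be copied before mutation.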
      private void ensureQueueUserAclsIsMutable() {
        if (!((bitField0_ & 0x00000001) != 0)) {
          queueUserAcls_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto>(queueUserAcls_);
          bitField0_ |= 0x00000001;
        }
      }

      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto, org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProtoOrBuilder> queueUserAclsBuilder_;

      /**
       * <code>repeated .hadoop.yarn.QueueUserACLInfoProto queueUserAcls = 1;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto> getQueueUserAclsList() {
        if (queueUserAclsBuilder_ == null) {
          return java.util.Collections.unmodifiableList(queueUserAcls_);
        } else {
          return queueUserAclsBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.QueueUserACLInfoProto queueUserAcls = 1;</code>
       */
      public int getQueueUserAclsCount() {
        if (queueUserAclsBuilder_ == null) {
          return queueUserAcls_.size();
        } else {
          return queueUserAclsBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.QueueUserACLInfoProto queueUserAcls = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto getQueueUserAcls(int index) {
        if (queueUserAclsBuilder_ == null) {
          return queueUserAcls_.get(index);
        } else {
          return queueUserAclsBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.QueueUserACLInfoProto queueUserAcls = 1;</code>
       */
      public Builder setQueueUserAcls(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto value) {
        if (queueUserAclsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureQueueUserAclsIsMutable();
          queueUserAcls_.set(index, value);
          onChanged();
        } else {
          queueUserAclsBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.QueueUserACLInfoProto queueUserAcls = 1;</code>
       */
      public Builder setQueueUserAcls(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto.Builder builderForValue) {
        if (queueUserAclsBuilder_ == null) {
          ensureQueueUserAclsIsMutable();
          queueUserAcls_.set(index, builderForValue.build());
          onChanged();
        } else {
          queueUserAclsBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.QueueUserACLInfoProto queueUserAcls = 1;</code>
       */
      public Builder addQueueUserAcls(org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto value) {
        if (queueUserAclsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureQueueUserAclsIsMutable();
          queueUserAcls_.add(value);
          onChanged();
        } else {
          queueUserAclsBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.QueueUserACLInfoProto queueUserAcls = 1;</code>
       */
      public Builder addQueueUserAcls(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto value) {
        if (queueUserAclsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureQueueUserAclsIsMutable();
          queueUserAcls_.add(index, value);
          onChanged();
        } else {
          queueUserAclsBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.QueueUserACLInfoProto queueUserAcls = 1;</code>
       */
      public Builder addQueueUserAcls(
          org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto.Builder builderForValue) {
        if (queueUserAclsBuilder_ == null) {
          ensureQueueUserAclsIsMutable();
          queueUserAcls_.add(builderForValue.build());
          onChanged();
        } else {
          queueUserAclsBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.QueueUserACLInfoProto queueUserAcls = 1;</code>
       */
      public Builder addQueueUserAcls(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto.Builder builderForValue) {
        if (queueUserAclsBuilder_ == null) {
          ensureQueueUserAclsIsMutable();
          queueUserAcls_.add(index, builderForValue.build());
          onChanged();
        } else {
          queueUserAclsBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.QueueUserACLInfoProto queueUserAcls = 1;</code>
       */
      public Builder addAllQueueUserAcls(
          java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto> values) {
        if (queueUserAclsBuilder_ == null) {
          ensureQueueUserAclsIsMutable();
          org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
              values, queueUserAcls_);
          onChanged();
        } else {
          queueUserAclsBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.QueueUserACLInfoProto queueUserAcls = 1;</code>
       */
      public Builder clearQueueUserAcls() {
        if (queueUserAclsBuilder_ == null) {
          queueUserAcls_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000001);
          onChanged();
        } else {
          queueUserAclsBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.QueueUserACLInfoProto queueUserAcls = 1;</code>
       */
      public Builder removeQueueUserAcls(int index) {
        if (queueUserAclsBuilder_ == null) {
          ensureQueueUserAclsIsMutable();
          queueUserAcls_.remove(index);
          onChanged();
        } else {
          queueUserAclsBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.QueueUserACLInfoProto queueUserAcls = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto.Builder getQueueUserAclsBuilder(
          int index) {
        return getQueueUserAclsFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .hadoop.yarn.QueueUserACLInfoProto queueUserAcls = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProtoOrBuilder getQueueUserAclsOrBuilder(
          int index) {
        if (queueUserAclsBuilder_ == null) {
          return queueUserAcls_.get(index);
        } else {
          return queueUserAclsBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.QueueUserACLInfoProto queueUserAcls = 1;</code>
       */
      public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProtoOrBuilder> 
           getQueueUserAclsOrBuilderList() {
        if (queueUserAclsBuilder_ != null) {
          return queueUserAclsBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(queueUserAcls_);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.QueueUserACLInfoProto queueUserAcls = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto.Builder addQueueUserAclsBuilder() {
        return getQueueUserAclsFieldBuilder().addBuilder(
            org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.QueueUserACLInfoProto queueUserAcls = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto.Builder addQueueUserAclsBuilder(
          int index) {
        return getQueueUserAclsFieldBuilder().addBuilder(
            index, org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.QueueUserACLInfoProto queueUserAcls = 1;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto.Builder> 
           getQueueUserAclsBuilderList() {
        return getQueueUserAclsFieldBuilder().getBuilderList();
      }
      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto, org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProtoOrBuilder> 
          getQueueUserAclsFieldBuilder() {
        if (queueUserAclsBuilder_ == null) {
          queueUserAclsBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto, org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProtoOrBuilder>(
                  queueUserAcls_,
                  ((bitField0_ & 0x00000001) != 0),
                  getParentForChildren(),
                  isClean());
          queueUserAcls_ = null;
        }
        return queueUserAclsBuilder_;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.GetQueueUserAclsInfoResponseProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.GetQueueUserAclsInfoResponseProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoResponseProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoResponseProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoResponseProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<GetQueueUserAclsInfoResponseProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<GetQueueUserAclsInfoResponseProto>() {
      @java.lang.Override
      public GetQueueUserAclsInfoResponseProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<GetQueueUserAclsInfoResponseProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<GetQueueUserAclsInfoResponseProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoResponseProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
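  // The queueUserAcls accessors above follow the generated repeated-field
  // pattern: while queueUserAclsBuilder_ is null the Builder mutates a plain
  // java.util.List (made privately mutable on first write via
  // ensureQueueUserAclsIsMutable(), tracked by bit 0x00000001 of bitField0_);
  // once getQueueUserAclsFieldBuilder() is called, ownership moves to a
  // RepeatedFieldBuilderV3 and the raw list reference is nulled out.
  // A minimal, illustrative use of the Builder (values are hypothetical):
  //
  //   GetQueueUserAclsInfoResponseProto resp =
  //       GetQueueUserAclsInfoResponseProto.newBuilder()
  //           .addQueueUserAcls(
  //               org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto
  //                   .getDefaultInstance())
  //           .build();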

  public interface GetNodesToLabelsRequestProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.GetNodesToLabelsRequestProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
  }
  /**
   * Protobuf type {@code hadoop.yarn.GetNodesToLabelsRequestProto}
   */
  public static final class GetNodesToLabelsRequestProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.GetNodesToLabelsRequestProto)
      GetNodesToLabelsRequestProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use GetNodesToLabelsRequestProto.newBuilder() to construct.
    private GetNodesToLabelsRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private GetNodesToLabelsRequestProto() {
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new GetNodesToLabelsRequestProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetNodesToLabelsRequestProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetNodesToLabelsRequestProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsRequestProto.Builder.class);
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsRequestProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsRequestProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsRequestProto) obj;

      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsRequestProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsRequestProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsRequestProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsRequestProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsRequestProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsRequestProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsRequestProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsRequestProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsRequestProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.GetNodesToLabelsRequestProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.GetNodesToLabelsRequestProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsRequestProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetNodesToLabelsRequestProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetNodesToLabelsRequestProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsRequestProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsRequestProto.newBuilder()
      private Builder() {

      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);

      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetNodesToLabelsRequestProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsRequestProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsRequestProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsRequestProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsRequestProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsRequestProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsRequestProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsRequestProto(this);
        onBuilt();
        return result;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsRequestProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsRequestProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsRequestProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsRequestProto.getDefaultInstance()) return this;
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.GetNodesToLabelsRequestProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.GetNodesToLabelsRequestProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsRequestProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsRequestProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsRequestProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<GetNodesToLabelsRequestProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<GetNodesToLabelsRequestProto>() {
      @java.lang.Override
      public GetNodesToLabelsRequestProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<GetNodesToLabelsRequestProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<GetNodesToLabelsRequestProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsRequestProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
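  // GetNodesToLabelsRequestProto declares no fields, so the generated class
  // only round-trips unknown fields: writeTo() emits getUnknownFields() alone,
  // equals() and hashCode() depend solely on the descriptor and unknown fields,
  // and the shared DEFAULT_INSTANCE serves as the canonical empty message.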

  public interface GetNodesToLabelsResponseProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.GetNodesToLabelsResponseProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>repeated .hadoop.yarn.NodeIdToLabelsProto nodeToLabels = 1;</code>
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto> 
        getNodeToLabelsList();
    /**
     * <code>repeated .hadoop.yarn.NodeIdToLabelsProto nodeToLabels = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto getNodeToLabels(int index);
    /**
     * <code>repeated .hadoop.yarn.NodeIdToLabelsProto nodeToLabels = 1;</code>
     */
    int getNodeToLabelsCount();
    /**
     * <code>repeated .hadoop.yarn.NodeIdToLabelsProto nodeToLabels = 1;</code>
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProtoOrBuilder> 
        getNodeToLabelsOrBuilderList();
    /**
     * <code>repeated .hadoop.yarn.NodeIdToLabelsProto nodeToLabels = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProtoOrBuilder getNodeToLabelsOrBuilder(
        int index);
  }
  /**
   * Protobuf type {@code hadoop.yarn.GetNodesToLabelsResponseProto}
   */
  public static final class GetNodesToLabelsResponseProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.GetNodesToLabelsResponseProto)
      GetNodesToLabelsResponseProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use GetNodesToLabelsResponseProto.newBuilder() to construct.
    private GetNodesToLabelsResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private GetNodesToLabelsResponseProto() {
      nodeToLabels_ = java.util.Collections.emptyList();
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new GetNodesToLabelsResponseProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetNodesToLabelsResponseProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetNodesToLabelsResponseProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsResponseProto.Builder.class);
    }

    public static final int NODETOLABELS_FIELD_NUMBER = 1;
    @SuppressWarnings("serial")
    private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto> nodeToLabels_;
    /**
     * <code>repeated .hadoop.yarn.NodeIdToLabelsProto nodeToLabels = 1;</code>
     */
    @java.lang.Override
    public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto> getNodeToLabelsList() {
      return nodeToLabels_;
    }
    /**
     * <code>repeated .hadoop.yarn.NodeIdToLabelsProto nodeToLabels = 1;</code>
     */
    @java.lang.Override
    public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProtoOrBuilder> 
        getNodeToLabelsOrBuilderList() {
      return nodeToLabels_;
    }
    /**
     * <code>repeated .hadoop.yarn.NodeIdToLabelsProto nodeToLabels = 1;</code>
     */
    @java.lang.Override
    public int getNodeToLabelsCount() {
      return nodeToLabels_.size();
    }
    /**
     * <code>repeated .hadoop.yarn.NodeIdToLabelsProto nodeToLabels = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto getNodeToLabels(int index) {
      return nodeToLabels_.get(index);
    }
    /**
     * <code>repeated .hadoop.yarn.NodeIdToLabelsProto nodeToLabels = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProtoOrBuilder getNodeToLabelsOrBuilder(
        int index) {
      return nodeToLabels_.get(index);
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      for (int i = 0; i < nodeToLabels_.size(); i++) {
        output.writeMessage(1, nodeToLabels_.get(i));
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      for (int i = 0; i < nodeToLabels_.size(); i++) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, nodeToLabels_.get(i));
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }
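    // getSerializedSize() memoizes its result in memoizedSize (-1 means "not
    // yet computed"), which is safe because generated messages are immutable.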

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsResponseProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsResponseProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsResponseProto) obj;

      if (!getNodeToLabelsList()
          .equals(other.getNodeToLabelsList())) return false;
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (getNodeToLabelsCount() > 0) {
        hash = (37 * hash) + NODETOLABELS_FIELD_NUMBER;
        hash = (53 * hash) + getNodeToLabelsList().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
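    // hashCode() mixes the descriptor, each present field keyed by its field
    // number, and the unknown fields, memoizing the result (0 means "unset").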

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsResponseProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsResponseProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsResponseProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsResponseProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsResponseProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsResponseProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsResponseProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsResponseProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsResponseProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.GetNodesToLabelsResponseProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.GetNodesToLabelsResponseProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsResponseProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetNodesToLabelsResponseProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetNodesToLabelsResponseProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsResponseProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsResponseProto.newBuilder()
      private Builder() {

      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);

      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        if (nodeToLabelsBuilder_ == null) {
          nodeToLabels_ = java.util.Collections.emptyList();
        } else {
          nodeToLabels_ = null;
          nodeToLabelsBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetNodesToLabelsResponseProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsResponseProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsResponseProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsResponseProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsResponseProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsResponseProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsResponseProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsResponseProto(this);
        buildPartialRepeatedFields(result);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartialRepeatedFields(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsResponseProto result) {
        if (nodeToLabelsBuilder_ == null) {
          if (((bitField0_ & 0x00000001) != 0)) {
            nodeToLabels_ = java.util.Collections.unmodifiableList(nodeToLabels_);
            bitField0_ = (bitField0_ & ~0x00000001);
          }
          result.nodeToLabels_ = nodeToLabels_;
        } else {
          result.nodeToLabels_ = nodeToLabelsBuilder_.build();
        }
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsResponseProto result) {
        int from_bitField0_ = bitField0_;
      }
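      // buildPartial0 above is the hook protoc emits for copying singular
      // fields; this message has none, so from_bitField0_ is left unused.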

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsResponseProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsResponseProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsResponseProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsResponseProto.getDefaultInstance()) return this;
        if (nodeToLabelsBuilder_ == null) {
          if (!other.nodeToLabels_.isEmpty()) {
            if (nodeToLabels_.isEmpty()) {
              nodeToLabels_ = other.nodeToLabels_;
              bitField0_ = (bitField0_ & ~0x00000001);
            } else {
              ensureNodeToLabelsIsMutable();
              nodeToLabels_.addAll(other.nodeToLabels_);
            }
            onChanged();
          }
        } else {
          if (!other.nodeToLabels_.isEmpty()) {
            if (nodeToLabelsBuilder_.isEmpty()) {
              nodeToLabelsBuilder_.dispose();
              nodeToLabelsBuilder_ = null;
              nodeToLabels_ = other.nodeToLabels_;
              bitField0_ = (bitField0_ & ~0x00000001);
              nodeToLabelsBuilder_ = 
                org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                   getNodeToLabelsFieldBuilder() : null;
            } else {
              nodeToLabelsBuilder_.addAllMessages(other.nodeToLabels_);
            }
          }
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
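              // Tag 10 = (field number 1 << 3) | wire type 2 (length-delimited),
              // i.e. one element of the repeated nodeToLabels message field.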
              case 10: {
                org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto m =
                    input.readMessage(
                        org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto.PARSER,
                        extensionRegistry);
                if (nodeToLabelsBuilder_ == null) {
                  ensureNodeToLabelsIsMutable();
                  nodeToLabels_.add(m);
                } else {
                  nodeToLabelsBuilder_.addMessage(m);
                }
                break;
              } // case 10
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

      private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto> nodeToLabels_ =
        java.util.Collections.emptyList();
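      // Copy-on-write guard: bit 0x00000001 of bitField0_ records whether
      // nodeToLabels_ is already a private mutable ArrayList; until the first
      // mutation it may alias an immutable (possibly shared) list, e.g. one
      // adopted directly from another message in mergeFrom.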
      private void ensureNodeToLabelsIsMutable() {
        if (!((bitField0_ & 0x00000001) != 0)) {
          nodeToLabels_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto>(nodeToLabels_);
          bitField0_ |= 0x00000001;
        }
      }

      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProtoOrBuilder> nodeToLabelsBuilder_;

      /**
       * <code>repeated .hadoop.yarn.NodeIdToLabelsProto nodeToLabels = 1;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto> getNodeToLabelsList() {
        if (nodeToLabelsBuilder_ == null) {
          return java.util.Collections.unmodifiableList(nodeToLabels_);
        } else {
          return nodeToLabelsBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.NodeIdToLabelsProto nodeToLabels = 1;</code>
       */
      public int getNodeToLabelsCount() {
        if (nodeToLabelsBuilder_ == null) {
          return nodeToLabels_.size();
        } else {
          return nodeToLabelsBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.NodeIdToLabelsProto nodeToLabels = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto getNodeToLabels(int index) {
        if (nodeToLabelsBuilder_ == null) {
          return nodeToLabels_.get(index);
        } else {
          return nodeToLabelsBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.NodeIdToLabelsProto nodeToLabels = 1;</code>
       */
      public Builder setNodeToLabels(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto value) {
        if (nodeToLabelsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureNodeToLabelsIsMutable();
          nodeToLabels_.set(index, value);
          onChanged();
        } else {
          nodeToLabelsBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeIdToLabelsProto nodeToLabels = 1;</code>
       */
      public Builder setNodeToLabels(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto.Builder builderForValue) {
        if (nodeToLabelsBuilder_ == null) {
          ensureNodeToLabelsIsMutable();
          nodeToLabels_.set(index, builderForValue.build());
          onChanged();
        } else {
          nodeToLabelsBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeIdToLabelsProto nodeToLabels = 1;</code>
       */
      public Builder addNodeToLabels(org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto value) {
        if (nodeToLabelsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureNodeToLabelsIsMutable();
          nodeToLabels_.add(value);
          onChanged();
        } else {
          nodeToLabelsBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeIdToLabelsProto nodeToLabels = 1;</code>
       */
      public Builder addNodeToLabels(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto value) {
        if (nodeToLabelsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureNodeToLabelsIsMutable();
          nodeToLabels_.add(index, value);
          onChanged();
        } else {
          nodeToLabelsBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeIdToLabelsProto nodeToLabels = 1;</code>
       */
      public Builder addNodeToLabels(
          org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto.Builder builderForValue) {
        if (nodeToLabelsBuilder_ == null) {
          ensureNodeToLabelsIsMutable();
          nodeToLabels_.add(builderForValue.build());
          onChanged();
        } else {
          nodeToLabelsBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeIdToLabelsProto nodeToLabels = 1;</code>
       */
      public Builder addNodeToLabels(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto.Builder builderForValue) {
        if (nodeToLabelsBuilder_ == null) {
          ensureNodeToLabelsIsMutable();
          nodeToLabels_.add(index, builderForValue.build());
          onChanged();
        } else {
          nodeToLabelsBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeIdToLabelsProto nodeToLabels = 1;</code>
       */
      public Builder addAllNodeToLabels(
          java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto> values) {
        if (nodeToLabelsBuilder_ == null) {
          ensureNodeToLabelsIsMutable();
          org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
              values, nodeToLabels_);
          onChanged();
        } else {
          nodeToLabelsBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeIdToLabelsProto nodeToLabels = 1;</code>
       */
      public Builder clearNodeToLabels() {
        if (nodeToLabelsBuilder_ == null) {
          nodeToLabels_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000001);
          onChanged();
        } else {
          nodeToLabelsBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeIdToLabelsProto nodeToLabels = 1;</code>
       */
      public Builder removeNodeToLabels(int index) {
        if (nodeToLabelsBuilder_ == null) {
          ensureNodeToLabelsIsMutable();
          nodeToLabels_.remove(index);
          onChanged();
        } else {
          nodeToLabelsBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeIdToLabelsProto nodeToLabels = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto.Builder getNodeToLabelsBuilder(
          int index) {
        return getNodeToLabelsFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .hadoop.yarn.NodeIdToLabelsProto nodeToLabels = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProtoOrBuilder getNodeToLabelsOrBuilder(
          int index) {
        if (nodeToLabelsBuilder_ == null) {
          return nodeToLabels_.get(index);
        } else {
          return nodeToLabelsBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.NodeIdToLabelsProto nodeToLabels = 1;</code>
       */
      public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProtoOrBuilder> 
           getNodeToLabelsOrBuilderList() {
        if (nodeToLabelsBuilder_ != null) {
          return nodeToLabelsBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(nodeToLabels_);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.NodeIdToLabelsProto nodeToLabels = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto.Builder addNodeToLabelsBuilder() {
        return getNodeToLabelsFieldBuilder().addBuilder(
            org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.NodeIdToLabelsProto nodeToLabels = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto.Builder addNodeToLabelsBuilder(
          int index) {
        return getNodeToLabelsFieldBuilder().addBuilder(
            index, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.NodeIdToLabelsProto nodeToLabels = 1;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto.Builder> 
           getNodeToLabelsBuilderList() {
        return getNodeToLabelsFieldBuilder().getBuilderList();
      }
      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProtoOrBuilder> 
          getNodeToLabelsFieldBuilder() {
        if (nodeToLabelsBuilder_ == null) {
          nodeToLabelsBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProtoOrBuilder>(
                  nodeToLabels_,
                  ((bitField0_ & 0x00000001) != 0),
                  getParentForChildren(),
                  isClean());
          nodeToLabels_ = null;
        }
        return nodeToLabelsBuilder_;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.GetNodesToLabelsResponseProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.GetNodesToLabelsResponseProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsResponseProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsResponseProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsResponseProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<GetNodesToLabelsResponseProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<GetNodesToLabelsResponseProto>() {
      @java.lang.Override
      public GetNodesToLabelsResponseProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<GetNodesToLabelsResponseProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<GetNodesToLabelsResponseProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsResponseProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
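
  // Usage sketch (hand-written illustration; `bytes` is an assumed variable
  // holding valid wire-format data):
  //
  //   GetNodesToLabelsResponseProto resp =
  //       GetNodesToLabelsResponseProto.parseFrom(bytes);
  //   for (org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto entry
  //       : resp.getNodeToLabelsList()) {
  //     // one NodeId -> labels mapping per entry
  //   }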

  public interface GetLabelsToNodesRequestProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.GetLabelsToNodesRequestProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>repeated string nodeLabels = 1;</code>
     * @return A list containing the nodeLabels.
     */
    java.util.List<java.lang.String>
        getNodeLabelsList();
    /**
     * <code>repeated string nodeLabels = 1;</code>
     * @return The count of nodeLabels.
     */
    int getNodeLabelsCount();
    /**
     * <code>repeated string nodeLabels = 1;</code>
     * @param index The index of the element to return.
     * @return The nodeLabels at the given index.
     */
    java.lang.String getNodeLabels(int index);
    /**
     * <code>repeated string nodeLabels = 1;</code>
     * @param index The index of the value to return.
     * @return The bytes of the nodeLabels at the given index.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getNodeLabelsBytes(int index);
  }
  /**
   * Protobuf type {@code hadoop.yarn.GetLabelsToNodesRequestProto}
   */
  public static final class GetLabelsToNodesRequestProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.GetLabelsToNodesRequestProto)
      GetLabelsToNodesRequestProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use GetLabelsToNodesRequestProto.newBuilder() to construct.
    private GetLabelsToNodesRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private GetLabelsToNodesRequestProto() {
      nodeLabels_ =
          org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new GetLabelsToNodesRequestProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetLabelsToNodesRequestProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetLabelsToNodesRequestProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesRequestProto.Builder.class);
    }

    public static final int NODELABELS_FIELD_NUMBER = 1;
    @SuppressWarnings("serial")
    private org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList nodeLabels_ =
        org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
    /**
     * <code>repeated string nodeLabels = 1;</code>
     * @return A list containing the nodeLabels.
     */
    public org.apache.hadoop.thirdparty.protobuf.ProtocolStringList
        getNodeLabelsList() {
      return nodeLabels_;
    }
    /**
     * <code>repeated string nodeLabels = 1;</code>
     * @return The count of nodeLabels.
     */
    public int getNodeLabelsCount() {
      return nodeLabels_.size();
    }
    /**
     * <code>repeated string nodeLabels = 1;</code>
     * @param index The index of the element to return.
     * @return The nodeLabels at the given index.
     */
    public java.lang.String getNodeLabels(int index) {
      return nodeLabels_.get(index);
    }
    /**
     * <code>repeated string nodeLabels = 1;</code>
     * @param index The index of the value to return.
     * @return The bytes of the nodeLabels at the given index.
     */
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getNodeLabelsBytes(int index) {
      return nodeLabels_.getByteString(index);
    }
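
    // Read-side sketch (hand-written illustration; `req` is an assumed
    // instance): the repeated string field is exposed as a list plus indexed
    // accessors, e.g.
    //
    //   for (String label : req.getNodeLabelsList()) { /* ... */ }
    //   int n = req.getNodeLabelsCount();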

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      for (int i = 0; i < nodeLabels_.size(); i++) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, nodeLabels_.getRaw(i));
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      {
        int dataSize = 0;
        for (int i = 0; i < nodeLabels_.size(); i++) {
          dataSize += computeStringSizeNoTag(nodeLabels_.getRaw(i));
        }
        size += dataSize;
        size += 1 * getNodeLabelsList().size();
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }
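
    // Note (hand-written): each nodeLabels element costs its length-prefixed
    // string bytes (computeStringSizeNoTag) plus exactly one tag byte, since
    // field 1 with wire type 2 encodes as tag (1 << 3) | 2 = 0x0A, a single
    // varint byte -- hence the `1 * getNodeLabelsList().size()` term above.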

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesRequestProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesRequestProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesRequestProto) obj;

      if (!getNodeLabelsList()
          .equals(other.getNodeLabelsList())) return false;
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (getNodeLabelsCount() > 0) {
        hash = (37 * hash) + NODELABELS_FIELD_NUMBER;
        hash = (53 * hash) + getNodeLabelsList().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesRequestProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesRequestProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesRequestProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesRequestProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesRequestProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesRequestProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesRequestProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesRequestProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }
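
    // Round-trip sketch (hand-written illustration): serialize with
    // toByteArray() and re-parse with any overload above.
    //
    //   GetLabelsToNodesRequestProto req = GetLabelsToNodesRequestProto
    //       .newBuilder().addNodeLabels("gpu").addNodeLabels("ssd").build();
    //   byte[] wire = req.toByteArray();
    //   GetLabelsToNodesRequestProto again =
    //       GetLabelsToNodesRequestProto.parseFrom(wire);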

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesRequestProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.GetLabelsToNodesRequestProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.GetLabelsToNodesRequestProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesRequestProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetLabelsToNodesRequestProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetLabelsToNodesRequestProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesRequestProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesRequestProto.newBuilder()
      private Builder() {
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        nodeLabels_ =
            org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetLabelsToNodesRequestProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesRequestProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesRequestProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesRequestProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesRequestProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesRequestProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesRequestProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesRequestProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesRequestProto result) {
        int from_bitField0_ = bitField0_;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          nodeLabels_.makeImmutable();
          result.nodeLabels_ = nodeLabels_;
        }
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesRequestProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesRequestProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesRequestProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesRequestProto.getDefaultInstance()) return this;
        if (!other.nodeLabels_.isEmpty()) {
          if (nodeLabels_.isEmpty()) {
            nodeLabels_ = other.nodeLabels_;
            bitField0_ |= 0x00000001;
          } else {
            ensureNodeLabelsIsMutable();
            nodeLabels_.addAll(other.nodeLabels_);
          }
          onChanged();
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes();
                ensureNodeLabelsIsMutable();
                nodeLabels_.add(bs);
                break;
              } // case 10
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
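
      // Note (hand-written): readTag() yields (field_number << 3) | wire_type,
      // so `case 10` above is field 1 with wire type 2 (length-delimited),
      // i.e. one nodeLabels string; a tag of 0 marks end of input.
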
      private int bitField0_;

      private org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList nodeLabels_ =
          org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
      private void ensureNodeLabelsIsMutable() {
        if (!nodeLabels_.isModifiable()) {
          nodeLabels_ = new org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList(nodeLabels_);
        }
        bitField0_ |= 0x00000001;
      }
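
      // Note (hand-written): this is copy-on-write -- a list shared with
      // another message (e.g. adopted during mergeFrom) is replaced by a
      // private mutable copy before the first mutation, and bit 0 of
      // bitField0_ records that this Builder now owns it.
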
      /**
       * <code>repeated string nodeLabels = 1;</code>
       * @return A list containing the nodeLabels.
       */
      public org.apache.hadoop.thirdparty.protobuf.ProtocolStringList
          getNodeLabelsList() {
        nodeLabels_.makeImmutable();
        return nodeLabels_;
      }
      /**
       * <code>repeated string nodeLabels = 1;</code>
       * @return The count of nodeLabels.
       */
      public int getNodeLabelsCount() {
        return nodeLabels_.size();
      }
      /**
       * <code>repeated string nodeLabels = 1;</code>
       * @param index The index of the element to return.
       * @return The nodeLabels at the given index.
       */
      public java.lang.String getNodeLabels(int index) {
        return nodeLabels_.get(index);
      }
      /**
       * <code>repeated string nodeLabels = 1;</code>
       * @param index The index of the value to return.
       * @return The bytes of the nodeLabels at the given index.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getNodeLabelsBytes(int index) {
        return nodeLabels_.getByteString(index);
      }
      /**
       * <code>repeated string nodeLabels = 1;</code>
       * @param index The index to set the value at.
       * @param value The nodeLabels to set.
       * @return This builder for chaining.
       */
      public Builder setNodeLabels(
          int index, java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        ensureNodeLabelsIsMutable();
        nodeLabels_.set(index, value);
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>repeated string nodeLabels = 1;</code>
       * @param value The nodeLabels to add.
       * @return This builder for chaining.
       */
      public Builder addNodeLabels(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        ensureNodeLabelsIsMutable();
        nodeLabels_.add(value);
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>repeated string nodeLabels = 1;</code>
       * @param values The nodeLabels to add.
       * @return This builder for chaining.
       */
      public Builder addAllNodeLabels(
          java.lang.Iterable<java.lang.String> values) {
        ensureNodeLabelsIsMutable();
        org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
            values, nodeLabels_);
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>repeated string nodeLabels = 1;</code>
       * @return This builder for chaining.
       */
      public Builder clearNodeLabels() {
        nodeLabels_ =
          org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
        return this;
      }
      /**
       * <code>repeated string nodeLabels = 1;</code>
       * @param value The bytes of the nodeLabels to add.
       * @return This builder for chaining.
       */
      public Builder addNodeLabelsBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        ensureNodeLabelsIsMutable();
        nodeLabels_.add(value);
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
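
      // Mutator sketch (hand-written illustration): setters return `this`, so
      // calls chain:
      //
      //   GetLabelsToNodesRequestProto.newBuilder()
      //       .addNodeLabels("x86")
      //       .addAllNodeLabels(java.util.Arrays.asList("gpu", "ssd"))
      //       .build();
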
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.GetLabelsToNodesRequestProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.GetLabelsToNodesRequestProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesRequestProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesRequestProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesRequestProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<GetLabelsToNodesRequestProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<GetLabelsToNodesRequestProto>() {
      @java.lang.Override
      public GetLabelsToNodesRequestProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<GetLabelsToNodesRequestProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<GetLabelsToNodesRequestProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesRequestProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }

  public interface GetLabelsToNodesResponseProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.GetLabelsToNodesResponseProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>repeated .hadoop.yarn.LabelsToNodeIdsProto labelsToNodes = 1;</code>
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto> 
        getLabelsToNodesList();
    /**
     * <code>repeated .hadoop.yarn.LabelsToNodeIdsProto labelsToNodes = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto getLabelsToNodes(int index);
    /**
     * <code>repeated .hadoop.yarn.LabelsToNodeIdsProto labelsToNodes = 1;</code>
     */
    int getLabelsToNodesCount();
    /**
     * <code>repeated .hadoop.yarn.LabelsToNodeIdsProto labelsToNodes = 1;</code>
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProtoOrBuilder> 
        getLabelsToNodesOrBuilderList();
    /**
     * <code>repeated .hadoop.yarn.LabelsToNodeIdsProto labelsToNodes = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProtoOrBuilder getLabelsToNodesOrBuilder(
        int index);
  }
  /**
   * Protobuf type {@code hadoop.yarn.GetLabelsToNodesResponseProto}
   */
  public static final class GetLabelsToNodesResponseProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.GetLabelsToNodesResponseProto)
      GetLabelsToNodesResponseProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use GetLabelsToNodesResponseProto.newBuilder() to construct.
    private GetLabelsToNodesResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private GetLabelsToNodesResponseProto() {
      labelsToNodes_ = java.util.Collections.emptyList();
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new GetLabelsToNodesResponseProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetLabelsToNodesResponseProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetLabelsToNodesResponseProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesResponseProto.Builder.class);
    }

    public static final int LABELSTONODES_FIELD_NUMBER = 1;
    @SuppressWarnings("serial")
    private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto> labelsToNodes_;
    /**
     * <code>repeated .hadoop.yarn.LabelsToNodeIdsProto labelsToNodes = 1;</code>
     */
    @java.lang.Override
    public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto> getLabelsToNodesList() {
      return labelsToNodes_;
    }
    /**
     * <code>repeated .hadoop.yarn.LabelsToNodeIdsProto labelsToNodes = 1;</code>
     */
    @java.lang.Override
    public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProtoOrBuilder> 
        getLabelsToNodesOrBuilderList() {
      return labelsToNodes_;
    }
    /**
     * <code>repeated .hadoop.yarn.LabelsToNodeIdsProto labelsToNodes = 1;</code>
     */
    @java.lang.Override
    public int getLabelsToNodesCount() {
      return labelsToNodes_.size();
    }
    /**
     * <code>repeated .hadoop.yarn.LabelsToNodeIdsProto labelsToNodes = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto getLabelsToNodes(int index) {
      return labelsToNodes_.get(index);
    }
    /**
     * <code>repeated .hadoop.yarn.LabelsToNodeIdsProto labelsToNodes = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProtoOrBuilder getLabelsToNodesOrBuilder(
        int index) {
      return labelsToNodes_.get(index);
    }
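
    // Read-side sketch (hand-written illustration; `resp` is an assumed
    // instance): repeated message fields hand back the backing list directly,
    // e.g.
    //
    //   for (org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto e
    //       : resp.getLabelsToNodesList()) { /* one label -> nodes entry */ }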

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      for (int i = 0; i < labelsToNodes_.size(); i++) {
        output.writeMessage(1, labelsToNodes_.get(i));
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      for (int i = 0; i < labelsToNodes_.size(); i++) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, labelsToNodes_.get(i));
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesResponseProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesResponseProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesResponseProto) obj;

      if (!getLabelsToNodesList()
          .equals(other.getLabelsToNodesList())) return false;
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (getLabelsToNodesCount() > 0) {
        hash = (37 * hash) + LABELSTONODES_FIELD_NUMBER;
        hash = (53 * hash) + getLabelsToNodesList().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesResponseProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesResponseProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesResponseProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesResponseProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesResponseProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesResponseProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesResponseProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesResponseProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesResponseProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.GetLabelsToNodesResponseProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.GetLabelsToNodesResponseProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesResponseProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetLabelsToNodesResponseProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetLabelsToNodesResponseProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesResponseProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesResponseProto.newBuilder()
      private Builder() {
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        if (labelsToNodesBuilder_ == null) {
          labelsToNodes_ = java.util.Collections.emptyList();
        } else {
          labelsToNodes_ = null;
          labelsToNodesBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetLabelsToNodesResponseProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesResponseProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesResponseProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesResponseProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesResponseProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesResponseProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesResponseProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesResponseProto(this);
        buildPartialRepeatedFields(result);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartialRepeatedFields(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesResponseProto result) {
        if (labelsToNodesBuilder_ == null) {
          if (((bitField0_ & 0x00000001) != 0)) {
            labelsToNodes_ = java.util.Collections.unmodifiableList(labelsToNodes_);
            bitField0_ = (bitField0_ & ~0x00000001);
          }
          result.labelsToNodes_ = labelsToNodes_;
        } else {
          result.labelsToNodes_ = labelsToNodesBuilder_.build();
        }
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesResponseProto result) {
        // No singular fields to copy here; the repeated labelsToNodes field is
        // handled in buildPartialRepeatedFields(result).
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesResponseProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesResponseProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesResponseProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesResponseProto.getDefaultInstance()) return this;
        if (labelsToNodesBuilder_ == null) {
          if (!other.labelsToNodes_.isEmpty()) {
            if (labelsToNodes_.isEmpty()) {
              labelsToNodes_ = other.labelsToNodes_;
              bitField0_ = (bitField0_ & ~0x00000001);
            } else {
              ensureLabelsToNodesIsMutable();
              labelsToNodes_.addAll(other.labelsToNodes_);
            }
            onChanged();
          }
        } else {
          if (!other.labelsToNodes_.isEmpty()) {
            if (labelsToNodesBuilder_.isEmpty()) {
              labelsToNodesBuilder_.dispose();
              labelsToNodesBuilder_ = null;
              labelsToNodes_ = other.labelsToNodes_;
              bitField0_ = (bitField0_ & ~0x00000001);
              labelsToNodesBuilder_ = 
                org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                   getLabelsToNodesFieldBuilder() : null;
            } else {
              labelsToNodesBuilder_.addAllMessages(other.labelsToNodes_);
            }
          }
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }
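
      // Note (hand-written): the merge is dual-path -- with no field builder
      // active it is a cheap list share/append, but once labelsToNodesBuilder_
      // exists, incoming messages are routed through it so any outstanding
      // child Builders stay consistent with the merged state.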

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto m =
                    input.readMessage(
                        org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto.PARSER,
                        extensionRegistry);
                if (labelsToNodesBuilder_ == null) {
                  ensureLabelsToNodesIsMutable();
                  labelsToNodes_.add(m);
                } else {
                  labelsToNodesBuilder_.addMessage(m);
                }
                break;
              } // case 10
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

      private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto> labelsToNodes_ =
        java.util.Collections.emptyList();
      private void ensureLabelsToNodesIsMutable() {
        if (!((bitField0_ & 0x00000001) != 0)) {
          labelsToNodes_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto>(labelsToNodes_);
          bitField0_ |= 0x00000001;
        }
      }

      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto, org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProtoOrBuilder> labelsToNodesBuilder_;

      /**
       * <code>repeated .hadoop.yarn.LabelsToNodeIdsProto labelsToNodes = 1;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto> getLabelsToNodesList() {
        if (labelsToNodesBuilder_ == null) {
          return java.util.Collections.unmodifiableList(labelsToNodes_);
        } else {
          return labelsToNodesBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.LabelsToNodeIdsProto labelsToNodes = 1;</code>
       */
      public int getLabelsToNodesCount() {
        if (labelsToNodesBuilder_ == null) {
          return labelsToNodes_.size();
        } else {
          return labelsToNodesBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.LabelsToNodeIdsProto labelsToNodes = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto getLabelsToNodes(int index) {
        if (labelsToNodesBuilder_ == null) {
          return labelsToNodes_.get(index);
        } else {
          return labelsToNodesBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.LabelsToNodeIdsProto labelsToNodes = 1;</code>
       */
      public Builder setLabelsToNodes(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto value) {
        if (labelsToNodesBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureLabelsToNodesIsMutable();
          labelsToNodes_.set(index, value);
          onChanged();
        } else {
          labelsToNodesBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.LabelsToNodeIdsProto labelsToNodes = 1;</code>
       */
      public Builder setLabelsToNodes(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto.Builder builderForValue) {
        if (labelsToNodesBuilder_ == null) {
          ensureLabelsToNodesIsMutable();
          labelsToNodes_.set(index, builderForValue.build());
          onChanged();
        } else {
          labelsToNodesBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.LabelsToNodeIdsProto labelsToNodes = 1;</code>
       */
      public Builder addLabelsToNodes(org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto value) {
        if (labelsToNodesBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureLabelsToNodesIsMutable();
          labelsToNodes_.add(value);
          onChanged();
        } else {
          labelsToNodesBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.LabelsToNodeIdsProto labelsToNodes = 1;</code>
       */
      public Builder addLabelsToNodes(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto value) {
        if (labelsToNodesBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureLabelsToNodesIsMutable();
          labelsToNodes_.add(index, value);
          onChanged();
        } else {
          labelsToNodesBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.LabelsToNodeIdsProto labelsToNodes = 1;</code>
       */
      public Builder addLabelsToNodes(
          org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto.Builder builderForValue) {
        if (labelsToNodesBuilder_ == null) {
          ensureLabelsToNodesIsMutable();
          labelsToNodes_.add(builderForValue.build());
          onChanged();
        } else {
          labelsToNodesBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.LabelsToNodeIdsProto labelsToNodes = 1;</code>
       */
      public Builder addLabelsToNodes(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto.Builder builderForValue) {
        if (labelsToNodesBuilder_ == null) {
          ensureLabelsToNodesIsMutable();
          labelsToNodes_.add(index, builderForValue.build());
          onChanged();
        } else {
          labelsToNodesBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.LabelsToNodeIdsProto labelsToNodes = 1;</code>
       */
      public Builder addAllLabelsToNodes(
          java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto> values) {
        if (labelsToNodesBuilder_ == null) {
          ensureLabelsToNodesIsMutable();
          org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
              values, labelsToNodes_);
          onChanged();
        } else {
          labelsToNodesBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.LabelsToNodeIdsProto labelsToNodes = 1;</code>
       */
      public Builder clearLabelsToNodes() {
        if (labelsToNodesBuilder_ == null) {
          labelsToNodes_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000001);
          onChanged();
        } else {
          labelsToNodesBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.LabelsToNodeIdsProto labelsToNodes = 1;</code>
       */
      public Builder removeLabelsToNodes(int index) {
        if (labelsToNodesBuilder_ == null) {
          ensureLabelsToNodesIsMutable();
          labelsToNodes_.remove(index);
          onChanged();
        } else {
          labelsToNodesBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.LabelsToNodeIdsProto labelsToNodes = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto.Builder getLabelsToNodesBuilder(
          int index) {
        return getLabelsToNodesFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .hadoop.yarn.LabelsToNodeIdsProto labelsToNodes = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProtoOrBuilder getLabelsToNodesOrBuilder(
          int index) {
        if (labelsToNodesBuilder_ == null) {
          return labelsToNodes_.get(index);
        } else {
          return labelsToNodesBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.LabelsToNodeIdsProto labelsToNodes = 1;</code>
       */
      public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProtoOrBuilder> 
           getLabelsToNodesOrBuilderList() {
        if (labelsToNodesBuilder_ != null) {
          return labelsToNodesBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(labelsToNodes_);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.LabelsToNodeIdsProto labelsToNodes = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto.Builder addLabelsToNodesBuilder() {
        return getLabelsToNodesFieldBuilder().addBuilder(
            org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.LabelsToNodeIdsProto labelsToNodes = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto.Builder addLabelsToNodesBuilder(
          int index) {
        return getLabelsToNodesFieldBuilder().addBuilder(
            index, org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.LabelsToNodeIdsProto labelsToNodes = 1;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto.Builder> 
           getLabelsToNodesBuilderList() {
        return getLabelsToNodesFieldBuilder().getBuilderList();
      }
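
      // Nested-builder sketch (hand-written illustration):
      //
      //   GetLabelsToNodesResponseProto.Builder rb =
      //       GetLabelsToNodesResponseProto.newBuilder();
      //   rb.addLabelsToNodesBuilder();  // child Builder, edit in place
      //   GetLabelsToNodesResponseProto resp = rb.build();
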
      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto, org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProtoOrBuilder> 
          getLabelsToNodesFieldBuilder() {
        if (labelsToNodesBuilder_ == null) {
          labelsToNodesBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto, org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProtoOrBuilder>(
                  labelsToNodes_,
                  ((bitField0_ & 0x00000001) != 0),
                  getParentForChildren(),
                  isClean());
          labelsToNodes_ = null;
        }
        return labelsToNodesBuilder_;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.GetLabelsToNodesResponseProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.GetLabelsToNodesResponseProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesResponseProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesResponseProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesResponseProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

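    // The anonymous parser below returns buildPartial() even when parsing fails,
    // letting callers recover the partially-read message via
    // InvalidProtocolBufferException.getUnfinishedMessage(); the PARSER field itself
    // is deprecated in favor of parser().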
    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<GetLabelsToNodesResponseProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<GetLabelsToNodesResponseProto>() {
      @java.lang.Override
      public GetLabelsToNodesResponseProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<GetLabelsToNodesResponseProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<GetLabelsToNodesResponseProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesResponseProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
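
  // A minimal parse sketch for the message above, assuming `bytes` holds a
  // serialized GetLabelsToNodesResponseProto:
  //
  //   YarnServiceProtos.GetLabelsToNodesResponseProto resp =
  //       YarnServiceProtos.GetLabelsToNodesResponseProto.parseFrom(bytes);
  //   for (org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto entry
  //       : resp.getLabelsToNodesList()) {
  //     // each entry pairs one node label with the node ids that carry it
  //   }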

  public interface GetClusterNodeLabelsRequestProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.GetClusterNodeLabelsRequestProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
  }
  /**
   * Protobuf type {@code hadoop.yarn.GetClusterNodeLabelsRequestProto}
   */
  public static final class GetClusterNodeLabelsRequestProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.GetClusterNodeLabelsRequestProto)
      GetClusterNodeLabelsRequestProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use GetClusterNodeLabelsRequestProto.newBuilder() to construct.
    private GetClusterNodeLabelsRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private GetClusterNodeLabelsRequestProto() {
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new GetClusterNodeLabelsRequestProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetClusterNodeLabelsRequestProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetClusterNodeLabelsRequestProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsRequestProto.Builder.class);
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsRequestProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsRequestProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsRequestProto) obj;

      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsRequestProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsRequestProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsRequestProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsRequestProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsRequestProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsRequestProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsRequestProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsRequestProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsRequestProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.GetClusterNodeLabelsRequestProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.GetClusterNodeLabelsRequestProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsRequestProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetClusterNodeLabelsRequestProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetClusterNodeLabelsRequestProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsRequestProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsRequestProto.newBuilder()
      private Builder() {

      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);

      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetClusterNodeLabelsRequestProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsRequestProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsRequestProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsRequestProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsRequestProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsRequestProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsRequestProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsRequestProto(this);
        onBuilt();
        return result;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsRequestProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsRequestProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsRequestProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsRequestProto.getDefaultInstance()) return this;
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.GetClusterNodeLabelsRequestProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.GetClusterNodeLabelsRequestProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsRequestProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsRequestProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsRequestProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<GetClusterNodeLabelsRequestProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<GetClusterNodeLabelsRequestProto>() {
      @java.lang.Override
      public GetClusterNodeLabelsRequestProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<GetClusterNodeLabelsRequestProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<GetClusterNodeLabelsRequestProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsRequestProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
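
  // The message above declares no fields; a request is just the default instance:
  //
  //   YarnServiceProtos.GetClusterNodeLabelsRequestProto req =
  //       YarnServiceProtos.GetClusterNodeLabelsRequestProto.newBuilder().build();
  //
  // which is equal to getDefaultInstance().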

  public interface GetClusterNodeLabelsResponseProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.GetClusterNodeLabelsResponseProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>repeated string deprecatedNodeLabels = 1;</code>
     * @return A list containing the deprecatedNodeLabels.
     */
    java.util.List<java.lang.String>
        getDeprecatedNodeLabelsList();
    /**
     * <code>repeated string deprecatedNodeLabels = 1;</code>
     * @return The count of deprecatedNodeLabels.
     */
    int getDeprecatedNodeLabelsCount();
    /**
     * <code>repeated string deprecatedNodeLabels = 1;</code>
     * @param index The index of the element to return.
     * @return The deprecatedNodeLabels at the given index.
     */
    java.lang.String getDeprecatedNodeLabels(int index);
    /**
     * <code>repeated string deprecatedNodeLabels = 1;</code>
     * @param index The index of the value to return.
     * @return The bytes of the deprecatedNodeLabels at the given index.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getDeprecatedNodeLabelsBytes(int index);

    /**
     * <code>repeated .hadoop.yarn.NodeLabelProto nodeLabels = 2;</code>
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto> 
        getNodeLabelsList();
    /**
     * <code>repeated .hadoop.yarn.NodeLabelProto nodeLabels = 2;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto getNodeLabels(int index);
    /**
     * <code>repeated .hadoop.yarn.NodeLabelProto nodeLabels = 2;</code>
     */
    int getNodeLabelsCount();
    /**
     * <code>repeated .hadoop.yarn.NodeLabelProto nodeLabels = 2;</code>
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProtoOrBuilder> 
        getNodeLabelsOrBuilderList();
    /**
     * <code>repeated .hadoop.yarn.NodeLabelProto nodeLabels = 2;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProtoOrBuilder getNodeLabelsOrBuilder(
        int index);
  }
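
  // A minimal construction sketch for the message defined below (the label value
  // "gpu" is hypothetical):
  //
  //   YarnServiceProtos.GetClusterNodeLabelsResponseProto resp =
  //       YarnServiceProtos.GetClusterNodeLabelsResponseProto.newBuilder()
  //           .addDeprecatedNodeLabels("gpu")
  //           .addNodeLabels(org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto
  //               .newBuilder().setName("gpu"))
  //           .build();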
  /**
   * Protobuf type {@code hadoop.yarn.GetClusterNodeLabelsResponseProto}
   */
  public static final class GetClusterNodeLabelsResponseProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.GetClusterNodeLabelsResponseProto)
      GetClusterNodeLabelsResponseProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use GetClusterNodeLabelsResponseProto.newBuilder() to construct.
    private GetClusterNodeLabelsResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private GetClusterNodeLabelsResponseProto() {
      deprecatedNodeLabels_ =
          org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
      nodeLabels_ = java.util.Collections.emptyList();
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new GetClusterNodeLabelsResponseProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetClusterNodeLabelsResponseProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetClusterNodeLabelsResponseProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsResponseProto.Builder.class);
    }

    public static final int DEPRECATEDNODELABELS_FIELD_NUMBER = 1;
    @SuppressWarnings("serial")
    private org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList deprecatedNodeLabels_ =
        org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
    /**
     * <code>repeated string deprecatedNodeLabels = 1;</code>
     * @return A list containing the deprecatedNodeLabels.
     */
    public org.apache.hadoop.thirdparty.protobuf.ProtocolStringList
        getDeprecatedNodeLabelsList() {
      return deprecatedNodeLabels_;
    }
    /**
     * <code>repeated string deprecatedNodeLabels = 1;</code>
     * @return The count of deprecatedNodeLabels.
     */
    public int getDeprecatedNodeLabelsCount() {
      return deprecatedNodeLabels_.size();
    }
    /**
     * <code>repeated string deprecatedNodeLabels = 1;</code>
     * @param index The index of the element to return.
     * @return The deprecatedNodeLabels at the given index.
     */
    public java.lang.String getDeprecatedNodeLabels(int index) {
      return deprecatedNodeLabels_.get(index);
    }
    /**
     * <code>repeated string deprecatedNodeLabels = 1;</code>
     * @param index The index of the value to return.
     * @return The bytes of the deprecatedNodeLabels at the given index.
     */
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getDeprecatedNodeLabelsBytes(int index) {
      return deprecatedNodeLabels_.getByteString(index);
    }

    public static final int NODELABELS_FIELD_NUMBER = 2;
    @SuppressWarnings("serial")
    private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto> nodeLabels_;
    /**
     * <code>repeated .hadoop.yarn.NodeLabelProto nodeLabels = 2;</code>
     */
    @java.lang.Override
    public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto> getNodeLabelsList() {
      return nodeLabels_;
    }
    /**
     * <code>repeated .hadoop.yarn.NodeLabelProto nodeLabels = 2;</code>
     */
    @java.lang.Override
    public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProtoOrBuilder> 
        getNodeLabelsOrBuilderList() {
      return nodeLabels_;
    }
    /**
     * <code>repeated .hadoop.yarn.NodeLabelProto nodeLabels = 2;</code>
     */
    @java.lang.Override
    public int getNodeLabelsCount() {
      return nodeLabels_.size();
    }
    /**
     * <code>repeated .hadoop.yarn.NodeLabelProto nodeLabels = 2;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto getNodeLabels(int index) {
      return nodeLabels_.get(index);
    }
    /**
     * <code>repeated .hadoop.yarn.NodeLabelProto nodeLabels = 2;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProtoOrBuilder getNodeLabelsOrBuilder(
        int index) {
      return nodeLabels_.get(index);
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      for (int i = 0; i < deprecatedNodeLabels_.size(); i++) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, deprecatedNodeLabels_.getRaw(i));
      }
      for (int i = 0; i < nodeLabels_.size(); i++) {
        output.writeMessage(2, nodeLabels_.get(i));
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      {
        int dataSize = 0;
        for (int i = 0; i < deprecatedNodeLabels_.size(); i++) {
          dataSize += computeStringSizeNoTag(deprecatedNodeLabels_.getRaw(i));
        }
        size += dataSize;
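        // one tag byte per element: field 1 with wire type 2 encodes its tag as the single byte 0x0A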
        size += 1 * getDeprecatedNodeLabelsList().size();
      }
      for (int i = 0; i < nodeLabels_.size(); i++) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(2, nodeLabels_.get(i));
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsResponseProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsResponseProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsResponseProto) obj;

      if (!getDeprecatedNodeLabelsList()
          .equals(other.getDeprecatedNodeLabelsList())) return false;
      if (!getNodeLabelsList()
          .equals(other.getNodeLabelsList())) return false;
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (getDeprecatedNodeLabelsCount() > 0) {
        hash = (37 * hash) + DEPRECATEDNODELABELS_FIELD_NUMBER;
        hash = (53 * hash) + getDeprecatedNodeLabelsList().hashCode();
      }
      if (getNodeLabelsCount() > 0) {
        hash = (37 * hash) + NODELABELS_FIELD_NUMBER;
        hash = (53 * hash) + getNodeLabelsList().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsResponseProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsResponseProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsResponseProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsResponseProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsResponseProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsResponseProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsResponseProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsResponseProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsResponseProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.GetClusterNodeLabelsResponseProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.GetClusterNodeLabelsResponseProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsResponseProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetClusterNodeLabelsResponseProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetClusterNodeLabelsResponseProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsResponseProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsResponseProto.newBuilder()
      private Builder() {

      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);

      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        deprecatedNodeLabels_ =
            org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
        if (nodeLabelsBuilder_ == null) {
          nodeLabels_ = java.util.Collections.emptyList();
        } else {
          nodeLabels_ = null;
          nodeLabelsBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000002);
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetClusterNodeLabelsResponseProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsResponseProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsResponseProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsResponseProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsResponseProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsResponseProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsResponseProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsResponseProto(this);
        buildPartialRepeatedFields(result);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartialRepeatedFields(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsResponseProto result) {
        if (nodeLabelsBuilder_ == null) {
          if (((bitField0_ & 0x00000002) != 0)) {
            nodeLabels_ = java.util.Collections.unmodifiableList(nodeLabels_);
            bitField0_ = (bitField0_ & ~0x00000002);
          }
          result.nodeLabels_ = nodeLabels_;
        } else {
          result.nodeLabels_ = nodeLabelsBuilder_.build();
        }
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsResponseProto result) {
        int from_bitField0_ = bitField0_;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          deprecatedNodeLabels_.makeImmutable();
          result.deprecatedNodeLabels_ = deprecatedNodeLabels_;
        }
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsResponseProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsResponseProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsResponseProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsResponseProto.getDefaultInstance()) return this;
        if (!other.deprecatedNodeLabels_.isEmpty()) {
          if (deprecatedNodeLabels_.isEmpty()) {
            deprecatedNodeLabels_ = other.deprecatedNodeLabels_;
            bitField0_ |= 0x00000001;
          } else {
            ensureDeprecatedNodeLabelsIsMutable();
            deprecatedNodeLabels_.addAll(other.deprecatedNodeLabels_);
          }
          onChanged();
        }
        if (nodeLabelsBuilder_ == null) {
          if (!other.nodeLabels_.isEmpty()) {
            if (nodeLabels_.isEmpty()) {
              nodeLabels_ = other.nodeLabels_;
              bitField0_ = (bitField0_ & ~0x00000002);
            } else {
              ensureNodeLabelsIsMutable();
              nodeLabels_.addAll(other.nodeLabels_);
            }
            onChanged();
          }
        } else {
          if (!other.nodeLabels_.isEmpty()) {
            if (nodeLabelsBuilder_.isEmpty()) {
              nodeLabelsBuilder_.dispose();
              nodeLabelsBuilder_ = null;
              nodeLabels_ = other.nodeLabels_;
              bitField0_ = (bitField0_ & ~0x00000002);
              nodeLabelsBuilder_ = 
                org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                   getNodeLabelsFieldBuilder() : null;
            } else {
              nodeLabelsBuilder_.addAllMessages(other.nodeLabels_);
            }
          }
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

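      // Wire-format note: a protobuf tag is (field_number << 3) | wire_type, so the
      // length-delimited fields below arrive as tag 10 (deprecatedNodeLabels, field 1)
      // and tag 18 (nodeLabels, field 2) in the switch inside mergeFrom.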
      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes();
                ensureDeprecatedNodeLabelsIsMutable();
                deprecatedNodeLabels_.add(bs);
                break;
              } // case 10
              case 18: {
                org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto m =
                    input.readMessage(
                        org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto.PARSER,
                        extensionRegistry);
                if (nodeLabelsBuilder_ == null) {
                  ensureNodeLabelsIsMutable();
                  nodeLabels_.add(m);
                } else {
                  nodeLabelsBuilder_.addMessage(m);
                }
                break;
              } // case 18
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

      private org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList deprecatedNodeLabels_ =
          org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
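      // LazyStringArrayList keeps elements as String or ByteString and defers UTF-8
      // decoding; isModifiable() below doubles as the copy-on-write check paired with
      // bit 0x00000001 in bitField0_.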
      private void ensureDeprecatedNodeLabelsIsMutable() {
        if (!deprecatedNodeLabels_.isModifiable()) {
          deprecatedNodeLabels_ = new org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList(deprecatedNodeLabels_);
        }
        bitField0_ |= 0x00000001;
      }
      /**
       * <code>repeated string deprecatedNodeLabels = 1;</code>
       * @return A list containing the deprecatedNodeLabels.
       */
      public org.apache.hadoop.thirdparty.protobuf.ProtocolStringList
          getDeprecatedNodeLabelsList() {
        deprecatedNodeLabels_.makeImmutable();
        return deprecatedNodeLabels_;
      }
      /**
       * <code>repeated string deprecatedNodeLabels = 1;</code>
       * @return The count of deprecatedNodeLabels.
       */
      public int getDeprecatedNodeLabelsCount() {
        return deprecatedNodeLabels_.size();
      }
      /**
       * <code>repeated string deprecatedNodeLabels = 1;</code>
       * @param index The index of the element to return.
       * @return The deprecatedNodeLabels at the given index.
       */
      public java.lang.String getDeprecatedNodeLabels(int index) {
        return deprecatedNodeLabels_.get(index);
      }
      /**
       * <code>repeated string deprecatedNodeLabels = 1;</code>
       * @param index The index of the value to return.
       * @return The bytes of the deprecatedNodeLabels at the given index.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getDeprecatedNodeLabelsBytes(int index) {
        return deprecatedNodeLabels_.getByteString(index);
      }
      /**
       * <code>repeated string deprecatedNodeLabels = 1;</code>
       * @param index The index to set the value at.
       * @param value The deprecatedNodeLabels to set.
       * @return This builder for chaining.
       */
      public Builder setDeprecatedNodeLabels(
          int index, java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        ensureDeprecatedNodeLabelsIsMutable();
        deprecatedNodeLabels_.set(index, value);
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>repeated string deprecatedNodeLabels = 1;</code>
       * @param value The deprecatedNodeLabels to add.
       * @return This builder for chaining.
       */
      public Builder addDeprecatedNodeLabels(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        ensureDeprecatedNodeLabelsIsMutable();
        deprecatedNodeLabels_.add(value);
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>repeated string deprecatedNodeLabels = 1;</code>
       * @param values The deprecatedNodeLabels to add.
       * @return This builder for chaining.
       */
      public Builder addAllDeprecatedNodeLabels(
          java.lang.Iterable<java.lang.String> values) {
        ensureDeprecatedNodeLabelsIsMutable();
        org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
            values, deprecatedNodeLabels_);
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>repeated string deprecatedNodeLabels = 1;</code>
       * @return This builder for chaining.
       */
      public Builder clearDeprecatedNodeLabels() {
        deprecatedNodeLabels_ =
          org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
        return this;
      }
      /**
       * <code>repeated string deprecatedNodeLabels = 1;</code>
       * @param value The bytes of the deprecatedNodeLabels to add.
       * @return This builder for chaining.
       */
      public Builder addDeprecatedNodeLabelsBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        ensureDeprecatedNodeLabelsIsMutable();
        deprecatedNodeLabels_.add(value);
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }

      private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto> nodeLabels_ =
        java.util.Collections.emptyList();
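      // Copy-on-write guard: bit 0x00000002 records that nodeLabels_ is a private
      // mutable copy; until it is set the list may still alias another message's list.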
      private void ensureNodeLabelsIsMutable() {
        if (!((bitField0_ & 0x00000002) != 0)) {
          nodeLabels_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto>(nodeLabels_);
          bitField0_ |= 0x00000002;
        }
      }

      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProtoOrBuilder> nodeLabelsBuilder_;

      /**
       * <code>repeated .hadoop.yarn.NodeLabelProto nodeLabels = 2;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto> getNodeLabelsList() {
        if (nodeLabelsBuilder_ == null) {
          return java.util.Collections.unmodifiableList(nodeLabels_);
        } else {
          return nodeLabelsBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.NodeLabelProto nodeLabels = 2;</code>
       */
      public int getNodeLabelsCount() {
        if (nodeLabelsBuilder_ == null) {
          return nodeLabels_.size();
        } else {
          return nodeLabelsBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.NodeLabelProto nodeLabels = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto getNodeLabels(int index) {
        if (nodeLabelsBuilder_ == null) {
          return nodeLabels_.get(index);
        } else {
          return nodeLabelsBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.NodeLabelProto nodeLabels = 2;</code>
       */
      public Builder setNodeLabels(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto value) {
        if (nodeLabelsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureNodeLabelsIsMutable();
          nodeLabels_.set(index, value);
          onChanged();
        } else {
          nodeLabelsBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeLabelProto nodeLabels = 2;</code>
       */
      public Builder setNodeLabels(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto.Builder builderForValue) {
        if (nodeLabelsBuilder_ == null) {
          ensureNodeLabelsIsMutable();
          nodeLabels_.set(index, builderForValue.build());
          onChanged();
        } else {
          nodeLabelsBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeLabelProto nodeLabels = 2;</code>
       */
      public Builder addNodeLabels(org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto value) {
        if (nodeLabelsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureNodeLabelsIsMutable();
          nodeLabels_.add(value);
          onChanged();
        } else {
          nodeLabelsBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeLabelProto nodeLabels = 2;</code>
       */
      public Builder addNodeLabels(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto value) {
        if (nodeLabelsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureNodeLabelsIsMutable();
          nodeLabels_.add(index, value);
          onChanged();
        } else {
          nodeLabelsBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeLabelProto nodeLabels = 2;</code>
       */
      public Builder addNodeLabels(
          org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto.Builder builderForValue) {
        if (nodeLabelsBuilder_ == null) {
          ensureNodeLabelsIsMutable();
          nodeLabels_.add(builderForValue.build());
          onChanged();
        } else {
          nodeLabelsBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeLabelProto nodeLabels = 2;</code>
       */
      public Builder addNodeLabels(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto.Builder builderForValue) {
        if (nodeLabelsBuilder_ == null) {
          ensureNodeLabelsIsMutable();
          nodeLabels_.add(index, builderForValue.build());
          onChanged();
        } else {
          nodeLabelsBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeLabelProto nodeLabels = 2;</code>
       */
      public Builder addAllNodeLabels(
          java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto> values) {
        if (nodeLabelsBuilder_ == null) {
          ensureNodeLabelsIsMutable();
          org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
              values, nodeLabels_);
          onChanged();
        } else {
          nodeLabelsBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeLabelProto nodeLabels = 2;</code>
       */
      public Builder clearNodeLabels() {
        if (nodeLabelsBuilder_ == null) {
          nodeLabels_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000002);
          onChanged();
        } else {
          nodeLabelsBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeLabelProto nodeLabels = 2;</code>
       */
      public Builder removeNodeLabels(int index) {
        if (nodeLabelsBuilder_ == null) {
          ensureNodeLabelsIsMutable();
          nodeLabels_.remove(index);
          onChanged();
        } else {
          nodeLabelsBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeLabelProto nodeLabels = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto.Builder getNodeLabelsBuilder(
          int index) {
        return getNodeLabelsFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .hadoop.yarn.NodeLabelProto nodeLabels = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProtoOrBuilder getNodeLabelsOrBuilder(
          int index) {
        if (nodeLabelsBuilder_ == null) {
          return nodeLabels_.get(index);
        } else {
          return nodeLabelsBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.NodeLabelProto nodeLabels = 2;</code>
       */
      public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProtoOrBuilder> 
           getNodeLabelsOrBuilderList() {
        if (nodeLabelsBuilder_ != null) {
          return nodeLabelsBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(nodeLabels_);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.NodeLabelProto nodeLabels = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto.Builder addNodeLabelsBuilder() {
        return getNodeLabelsFieldBuilder().addBuilder(
            org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.NodeLabelProto nodeLabels = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto.Builder addNodeLabelsBuilder(
          int index) {
        return getNodeLabelsFieldBuilder().addBuilder(
            index, org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.NodeLabelProto nodeLabels = 2;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto.Builder> 
           getNodeLabelsBuilderList() {
        return getNodeLabelsFieldBuilder().getBuilderList();
      }
      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProtoOrBuilder> 
          getNodeLabelsFieldBuilder() {
        if (nodeLabelsBuilder_ == null) {
          nodeLabelsBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProtoOrBuilder>(
                  nodeLabels_,
                  ((bitField0_ & 0x00000002) != 0),
                  getParentForChildren(),
                  isClean());
          nodeLabels_ = null;
        }
        return nodeLabelsBuilder_;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.GetClusterNodeLabelsResponseProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.GetClusterNodeLabelsResponseProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsResponseProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsResponseProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsResponseProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<GetClusterNodeLabelsResponseProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<GetClusterNodeLabelsResponseProto>() {
      @java.lang.Override
      public GetClusterNodeLabelsResponseProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<GetClusterNodeLabelsResponseProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<GetClusterNodeLabelsResponseProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsResponseProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
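
  // A usage sketch (illustrative, not generated code): round-tripping a
  // GetClusterNodeLabelsResponseProto. Assumes NodeLabelProto exposes the
  // usual protoc-generated setName(String) setter for its string field.
  //
  //   org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsResponseProto response =
  //       org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsResponseProto.newBuilder()
  //           .addNodeLabels(org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto.newBuilder()
  //               .setName("gpu").build())
  //           .build();
  //   byte[] wire = response.toByteArray();
  //   org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsResponseProto parsed =
  //       org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsResponseProto.parseFrom(wire);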

  public interface GetClusterNodeAttributesRequestProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.GetClusterNodeAttributesRequestProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
  }
  /**
   * Protobuf type {@code hadoop.yarn.GetClusterNodeAttributesRequestProto}
   */
  public static final class GetClusterNodeAttributesRequestProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.GetClusterNodeAttributesRequestProto)
      GetClusterNodeAttributesRequestProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use GetClusterNodeAttributesRequestProto.newBuilder() to construct.
    private GetClusterNodeAttributesRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private GetClusterNodeAttributesRequestProto() {
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new GetClusterNodeAttributesRequestProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetClusterNodeAttributesRequestProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetClusterNodeAttributesRequestProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesRequestProto.Builder.class);
    }

    private byte memoizedIsInitialized = -1;
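    // Tri-state cache: -1 = not yet computed, 0 = known uninitialized,
    // 1 = known initialized.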
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesRequestProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesRequestProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesRequestProto) obj;

      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesRequestProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesRequestProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesRequestProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesRequestProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesRequestProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesRequestProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesRequestProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesRequestProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesRequestProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.GetClusterNodeAttributesRequestProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.GetClusterNodeAttributesRequestProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesRequestProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetClusterNodeAttributesRequestProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetClusterNodeAttributesRequestProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesRequestProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesRequestProto.newBuilder()
      private Builder() {
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetClusterNodeAttributesRequestProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesRequestProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesRequestProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesRequestProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesRequestProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesRequestProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesRequestProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesRequestProto(this);
        onBuilt();
        return result;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesRequestProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesRequestProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesRequestProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesRequestProto.getDefaultInstance()) return this;
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
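            // Tag 0 signals end of input; unrecognized tags are preserved in
            // the UnknownFieldSet (or terminate the loop on a group-end tag).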
            switch (tag) {
              case 0:
                done = true;
                break;
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.GetClusterNodeAttributesRequestProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.GetClusterNodeAttributesRequestProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesRequestProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesRequestProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesRequestProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<GetClusterNodeAttributesRequestProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<GetClusterNodeAttributesRequestProto>() {
      @java.lang.Override
      public GetClusterNodeAttributesRequestProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<GetClusterNodeAttributesRequestProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<GetClusterNodeAttributesRequestProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesRequestProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
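
  // A usage sketch (illustrative, not generated code): this request message
  // declares no fields, so callers typically reuse the shared default
  // instance rather than allocating a new one.
  //
  //   org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesRequestProto request =
  //       org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesRequestProto.getDefaultInstance();
  //   // Serializes to zero bytes (plus any unknown fields, none here).
  //   assert request.getSerializedSize() == 0;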

  public interface GetClusterNodeAttributesResponseProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.GetClusterNodeAttributesResponseProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>repeated .hadoop.yarn.NodeAttributeInfoProto nodeAttributes = 1;</code>
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto> 
        getNodeAttributesList();
    /**
     * <code>repeated .hadoop.yarn.NodeAttributeInfoProto nodeAttributes = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto getNodeAttributes(int index);
    /**
     * <code>repeated .hadoop.yarn.NodeAttributeInfoProto nodeAttributes = 1;</code>
     */
    int getNodeAttributesCount();
    /**
     * <code>repeated .hadoop.yarn.NodeAttributeInfoProto nodeAttributes = 1;</code>
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProtoOrBuilder> 
        getNodeAttributesOrBuilderList();
    /**
     * <code>repeated .hadoop.yarn.NodeAttributeInfoProto nodeAttributes = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProtoOrBuilder getNodeAttributesOrBuilder(
        int index);
  }
  /**
   * Protobuf type {@code hadoop.yarn.GetClusterNodeAttributesResponseProto}
   */
  public static final class GetClusterNodeAttributesResponseProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.GetClusterNodeAttributesResponseProto)
      GetClusterNodeAttributesResponseProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use GetClusterNodeAttributesResponseProto.newBuilder() to construct.
    private GetClusterNodeAttributesResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private GetClusterNodeAttributesResponseProto() {
      nodeAttributes_ = java.util.Collections.emptyList();
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new GetClusterNodeAttributesResponseProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetClusterNodeAttributesResponseProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetClusterNodeAttributesResponseProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesResponseProto.Builder.class);
    }

    public static final int NODEATTRIBUTES_FIELD_NUMBER = 1;
    @SuppressWarnings("serial")
    private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto> nodeAttributes_;
    /**
     * <code>repeated .hadoop.yarn.NodeAttributeInfoProto nodeAttributes = 1;</code>
     */
    @java.lang.Override
    public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto> getNodeAttributesList() {
      return nodeAttributes_;
    }
    /**
     * <code>repeated .hadoop.yarn.NodeAttributeInfoProto nodeAttributes = 1;</code>
     */
    @java.lang.Override
    public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProtoOrBuilder> 
        getNodeAttributesOrBuilderList() {
      return nodeAttributes_;
    }
    /**
     * <code>repeated .hadoop.yarn.NodeAttributeInfoProto nodeAttributes = 1;</code>
     */
    @java.lang.Override
    public int getNodeAttributesCount() {
      return nodeAttributes_.size();
    }
    /**
     * <code>repeated .hadoop.yarn.NodeAttributeInfoProto nodeAttributes = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto getNodeAttributes(int index) {
      return nodeAttributes_.get(index);
    }
    /**
     * <code>repeated .hadoop.yarn.NodeAttributeInfoProto nodeAttributes = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProtoOrBuilder getNodeAttributesOrBuilder(
        int index) {
      return nodeAttributes_.get(index);
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      for (int i = 0; i < getNodeAttributesCount(); i++) {
        if (!getNodeAttributes(i).isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      for (int i = 0; i < nodeAttributes_.size(); i++) {
        output.writeMessage(1, nodeAttributes_.get(i));
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      for (int i = 0; i < nodeAttributes_.size(); i++) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, nodeAttributes_.get(i));
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesResponseProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesResponseProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesResponseProto) obj;

      if (!getNodeAttributesList()
          .equals(other.getNodeAttributesList())) return false;
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (getNodeAttributesCount() > 0) {
        hash = (37 * hash) + NODEATTRIBUTES_FIELD_NUMBER;
        hash = (53 * hash) + getNodeAttributesList().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesResponseProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesResponseProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesResponseProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesResponseProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesResponseProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesResponseProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesResponseProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesResponseProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesResponseProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.GetClusterNodeAttributesResponseProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.GetClusterNodeAttributesResponseProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesResponseProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetClusterNodeAttributesResponseProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetClusterNodeAttributesResponseProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesResponseProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesResponseProto.newBuilder()
      private Builder() {
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        if (nodeAttributesBuilder_ == null) {
          nodeAttributes_ = java.util.Collections.emptyList();
        } else {
          nodeAttributes_ = null;
          nodeAttributesBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetClusterNodeAttributesResponseProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesResponseProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesResponseProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesResponseProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesResponseProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesResponseProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesResponseProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesResponseProto(this);
        buildPartialRepeatedFields(result);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartialRepeatedFields(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesResponseProto result) {
        if (nodeAttributesBuilder_ == null) {
          if (((bitField0_ & 0x00000001) != 0)) {
            nodeAttributes_ = java.util.Collections.unmodifiableList(nodeAttributes_);
            bitField0_ = (bitField0_ & ~0x00000001);
          }
          result.nodeAttributes_ = nodeAttributes_;
        } else {
          result.nodeAttributes_ = nodeAttributesBuilder_.build();
        }
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesResponseProto result) {
        // This message has no singular (non-repeated) fields, so there is
        // nothing to copy here; the local mirrors other generated messages.
        int from_bitField0_ = bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesResponseProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesResponseProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesResponseProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesResponseProto.getDefaultInstance()) return this;
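        // Repeated-field merge: an empty target list can alias other's
        // (immutable) list directly; a non-empty one is made mutable and
        // other's elements are appended. The builder path mirrors this.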
        if (nodeAttributesBuilder_ == null) {
          if (!other.nodeAttributes_.isEmpty()) {
            if (nodeAttributes_.isEmpty()) {
              nodeAttributes_ = other.nodeAttributes_;
              bitField0_ = (bitField0_ & ~0x00000001);
            } else {
              ensureNodeAttributesIsMutable();
              nodeAttributes_.addAll(other.nodeAttributes_);
            }
            onChanged();
          }
        } else {
          if (!other.nodeAttributes_.isEmpty()) {
            if (nodeAttributesBuilder_.isEmpty()) {
              nodeAttributesBuilder_.dispose();
              nodeAttributesBuilder_ = null;
              nodeAttributes_ = other.nodeAttributes_;
              bitField0_ = (bitField0_ & ~0x00000001);
              nodeAttributesBuilder_ = 
                org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                   getNodeAttributesFieldBuilder() : null;
            } else {
              nodeAttributesBuilder_.addAllMessages(other.nodeAttributes_);
            }
          }
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        for (int i = 0; i < getNodeAttributesCount(); i++) {
          if (!getNodeAttributes(i).isInitialized()) {
            return false;
          }
        }
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
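              // Tag 10 = (field number 1 << 3) | wire type 2
              // (length-delimited): a serialized NodeAttributeInfoProto.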
              case 10: {
                org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto m =
                    input.readMessage(
                        org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto.PARSER,
                        extensionRegistry);
                if (nodeAttributesBuilder_ == null) {
                  ensureNodeAttributesIsMutable();
                  nodeAttributes_.add(m);
                } else {
                  nodeAttributesBuilder_.addMessage(m);
                }
                break;
              } // case 10
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

      private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto> nodeAttributes_ =
        java.util.Collections.emptyList();
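      // Copy-on-write guard: bit 0x00000001 records whether nodeAttributes_
      // is already a private mutable ArrayList; until then it may alias an
      // immutable or shared list and must be copied before modification.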
      private void ensureNodeAttributesIsMutable() {
        if (!((bitField0_ & 0x00000001) != 0)) {
          nodeAttributes_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto>(nodeAttributes_);
          bitField0_ |= 0x00000001;
        }
      }

      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProtoOrBuilder> nodeAttributesBuilder_;

      /**
       * <code>repeated .hadoop.yarn.NodeAttributeInfoProto nodeAttributes = 1;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto> getNodeAttributesList() {
        if (nodeAttributesBuilder_ == null) {
          return java.util.Collections.unmodifiableList(nodeAttributes_);
        } else {
          return nodeAttributesBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.NodeAttributeInfoProto nodeAttributes = 1;</code>
       */
      public int getNodeAttributesCount() {
        if (nodeAttributesBuilder_ == null) {
          return nodeAttributes_.size();
        } else {
          return nodeAttributesBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.NodeAttributeInfoProto nodeAttributes = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto getNodeAttributes(int index) {
        if (nodeAttributesBuilder_ == null) {
          return nodeAttributes_.get(index);
        } else {
          return nodeAttributesBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.NodeAttributeInfoProto nodeAttributes = 1;</code>
       */
      public Builder setNodeAttributes(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto value) {
        if (nodeAttributesBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureNodeAttributesIsMutable();
          nodeAttributes_.set(index, value);
          onChanged();
        } else {
          nodeAttributesBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeAttributeInfoProto nodeAttributes = 1;</code>
       */
      public Builder setNodeAttributes(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto.Builder builderForValue) {
        if (nodeAttributesBuilder_ == null) {
          ensureNodeAttributesIsMutable();
          nodeAttributes_.set(index, builderForValue.build());
          onChanged();
        } else {
          nodeAttributesBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeAttributeInfoProto nodeAttributes = 1;</code>
       */
      public Builder addNodeAttributes(org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto value) {
        if (nodeAttributesBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureNodeAttributesIsMutable();
          nodeAttributes_.add(value);
          onChanged();
        } else {
          nodeAttributesBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeAttributeInfoProto nodeAttributes = 1;</code>
       */
      public Builder addNodeAttributes(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto value) {
        if (nodeAttributesBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureNodeAttributesIsMutable();
          nodeAttributes_.add(index, value);
          onChanged();
        } else {
          nodeAttributesBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeAttributeInfoProto nodeAttributes = 1;</code>
       */
      public Builder addNodeAttributes(
          org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto.Builder builderForValue) {
        if (nodeAttributesBuilder_ == null) {
          ensureNodeAttributesIsMutable();
          nodeAttributes_.add(builderForValue.build());
          onChanged();
        } else {
          nodeAttributesBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeAttributeInfoProto nodeAttributes = 1;</code>
       */
      public Builder addNodeAttributes(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto.Builder builderForValue) {
        if (nodeAttributesBuilder_ == null) {
          ensureNodeAttributesIsMutable();
          nodeAttributes_.add(index, builderForValue.build());
          onChanged();
        } else {
          nodeAttributesBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeAttributeInfoProto nodeAttributes = 1;</code>
       */
      public Builder addAllNodeAttributes(
          java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto> values) {
        if (nodeAttributesBuilder_ == null) {
          ensureNodeAttributesIsMutable();
          org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
              values, nodeAttributes_);
          onChanged();
        } else {
          nodeAttributesBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeAttributeInfoProto nodeAttributes = 1;</code>
       */
      public Builder clearNodeAttributes() {
        if (nodeAttributesBuilder_ == null) {
          nodeAttributes_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000001);
          onChanged();
        } else {
          nodeAttributesBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeAttributeInfoProto nodeAttributes = 1;</code>
       */
      public Builder removeNodeAttributes(int index) {
        if (nodeAttributesBuilder_ == null) {
          ensureNodeAttributesIsMutable();
          nodeAttributes_.remove(index);
          onChanged();
        } else {
          nodeAttributesBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeAttributeInfoProto nodeAttributes = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto.Builder getNodeAttributesBuilder(
          int index) {
        return getNodeAttributesFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .hadoop.yarn.NodeAttributeInfoProto nodeAttributes = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProtoOrBuilder getNodeAttributesOrBuilder(
          int index) {
        if (nodeAttributesBuilder_ == null) {
          return nodeAttributes_.get(index);
        } else {
          return nodeAttributesBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.NodeAttributeInfoProto nodeAttributes = 1;</code>
       */
      public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProtoOrBuilder> 
           getNodeAttributesOrBuilderList() {
        if (nodeAttributesBuilder_ != null) {
          return nodeAttributesBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(nodeAttributes_);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.NodeAttributeInfoProto nodeAttributes = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto.Builder addNodeAttributesBuilder() {
        return getNodeAttributesFieldBuilder().addBuilder(
            org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.NodeAttributeInfoProto nodeAttributes = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto.Builder addNodeAttributesBuilder(
          int index) {
        return getNodeAttributesFieldBuilder().addBuilder(
            index, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.NodeAttributeInfoProto nodeAttributes = 1;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto.Builder> 
           getNodeAttributesBuilderList() {
        return getNodeAttributesFieldBuilder().getBuilderList();
      }
      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProtoOrBuilder> 
          getNodeAttributesFieldBuilder() {
        if (nodeAttributesBuilder_ == null) {
          nodeAttributesBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProtoOrBuilder>(
                  nodeAttributes_,
                  ((bitField0_ & 0x00000001) != 0),
                  getParentForChildren(),
                  isClean());
          nodeAttributes_ = null;
        }
        return nodeAttributesBuilder_;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.GetClusterNodeAttributesResponseProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.GetClusterNodeAttributesResponseProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesResponseProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesResponseProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesResponseProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<GetClusterNodeAttributesResponseProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<GetClusterNodeAttributesResponseProto>() {
      @java.lang.Override
      public GetClusterNodeAttributesResponseProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
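        // On any failure, attach the partially parsed message
        // (builder.buildPartial()) to the thrown exception so callers can
        // recover whatever was read before the error.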
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<GetClusterNodeAttributesResponseProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<GetClusterNodeAttributesResponseProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesResponseProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
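
  // A usage sketch (illustrative, not generated code): reading the repeated
  // nodeAttributes field from a parsed response; on a built message
  // getNodeAttributesList() returns an effectively immutable list, so it can
  // be iterated without copying.
  //
  //   org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesResponseProto response =
  //       org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesResponseProto.parseFrom(bytes);
  //   for (org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto info
  //       : response.getNodeAttributesList()) {
  //     System.out.println(info);
  //   }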

  public interface GetAttributesToNodesRequestProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.GetAttributesToNodesRequestProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>repeated .hadoop.yarn.NodeAttributeKeyProto nodeAttributes = 1;</code>
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto> 
        getNodeAttributesList();
    /**
     * <code>repeated .hadoop.yarn.NodeAttributeKeyProto nodeAttributes = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto getNodeAttributes(int index);
    /**
     * <code>repeated .hadoop.yarn.NodeAttributeKeyProto nodeAttributes = 1;</code>
     */
    int getNodeAttributesCount();
    /**
     * <code>repeated .hadoop.yarn.NodeAttributeKeyProto nodeAttributes = 1;</code>
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProtoOrBuilder> 
        getNodeAttributesOrBuilderList();
    /**
     * <code>repeated .hadoop.yarn.NodeAttributeKeyProto nodeAttributes = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProtoOrBuilder getNodeAttributesOrBuilder(
        int index);
  }
  /**
   * Protobuf type {@code hadoop.yarn.GetAttributesToNodesRequestProto}
   */
  public static final class GetAttributesToNodesRequestProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.GetAttributesToNodesRequestProto)
      GetAttributesToNodesRequestProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use GetAttributesToNodesRequestProto.newBuilder() to construct.
    private GetAttributesToNodesRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private GetAttributesToNodesRequestProto() {
      nodeAttributes_ = java.util.Collections.emptyList();
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new GetAttributesToNodesRequestProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetAttributesToNodesRequestProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetAttributesToNodesRequestProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesRequestProto.Builder.class);
    }

    public static final int NODEATTRIBUTES_FIELD_NUMBER = 1;
    @SuppressWarnings("serial")
    private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto> nodeAttributes_;
    /**
     * <code>repeated .hadoop.yarn.NodeAttributeKeyProto nodeAttributes = 1;</code>
     */
    @java.lang.Override
    public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto> getNodeAttributesList() {
      return nodeAttributes_;
    }
    /**
     * <code>repeated .hadoop.yarn.NodeAttributeKeyProto nodeAttributes = 1;</code>
     */
    @java.lang.Override
    public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProtoOrBuilder> 
        getNodeAttributesOrBuilderList() {
      return nodeAttributes_;
    }
    /**
     * <code>repeated .hadoop.yarn.NodeAttributeKeyProto nodeAttributes = 1;</code>
     */
    @java.lang.Override
    public int getNodeAttributesCount() {
      return nodeAttributes_.size();
    }
    /**
     * <code>repeated .hadoop.yarn.NodeAttributeKeyProto nodeAttributes = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto getNodeAttributes(int index) {
      return nodeAttributes_.get(index);
    }
    /**
     * <code>repeated .hadoop.yarn.NodeAttributeKeyProto nodeAttributes = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProtoOrBuilder getNodeAttributesOrBuilder(
        int index) {
      return nodeAttributes_.get(index);
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      for (int i = 0; i < getNodeAttributesCount(); i++) {
        if (!getNodeAttributes(i).isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      for (int i = 0; i < nodeAttributes_.size(); i++) {
        output.writeMessage(1, nodeAttributes_.get(i));
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      for (int i = 0; i < nodeAttributes_.size(); i++) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, nodeAttributes_.get(i));
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesRequestProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesRequestProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesRequestProto) obj;

      if (!getNodeAttributesList()
          .equals(other.getNodeAttributesList())) return false;
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (getNodeAttributesCount() > 0) {
        hash = (37 * hash) + NODEATTRIBUTES_FIELD_NUMBER;
        hash = (53 * hash) + getNodeAttributesList().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesRequestProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesRequestProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesRequestProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesRequestProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesRequestProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesRequestProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesRequestProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesRequestProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesRequestProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.GetAttributesToNodesRequestProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.GetAttributesToNodesRequestProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesRequestProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetAttributesToNodesRequestProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetAttributesToNodesRequestProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesRequestProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesRequestProto.newBuilder()
      private Builder() {

      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);

      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        if (nodeAttributesBuilder_ == null) {
          nodeAttributes_ = java.util.Collections.emptyList();
        } else {
          nodeAttributes_ = null;
          nodeAttributesBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetAttributesToNodesRequestProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesRequestProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesRequestProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesRequestProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesRequestProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesRequestProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesRequestProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesRequestProto(this);
        buildPartialRepeatedFields(result);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartialRepeatedFields(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesRequestProto result) {
        if (nodeAttributesBuilder_ == null) {
          if (((bitField0_ & 0x00000001) != 0)) {
            nodeAttributes_ = java.util.Collections.unmodifiableList(nodeAttributes_);
            bitField0_ = (bitField0_ & ~0x00000001);
          }
          result.nodeAttributes_ = nodeAttributes_;
        } else {
          result.nodeAttributes_ = nodeAttributesBuilder_.build();
        }
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesRequestProto result) {
        // This message has no singular fields, so there is nothing to copy
        // here; the repeated nodeAttributes field is materialized in
        // buildPartialRepeatedFields(result).
        int from_bitField0_ = bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesRequestProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesRequestProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesRequestProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesRequestProto.getDefaultInstance()) return this;
        if (nodeAttributesBuilder_ == null) {
          if (!other.nodeAttributes_.isEmpty()) {
            if (nodeAttributes_.isEmpty()) {
              nodeAttributes_ = other.nodeAttributes_;
              bitField0_ = (bitField0_ & ~0x00000001);
            } else {
              ensureNodeAttributesIsMutable();
              nodeAttributes_.addAll(other.nodeAttributes_);
            }
            onChanged();
          }
        } else {
          if (!other.nodeAttributes_.isEmpty()) {
            if (nodeAttributesBuilder_.isEmpty()) {
              nodeAttributesBuilder_.dispose();
              nodeAttributesBuilder_ = null;
              nodeAttributes_ = other.nodeAttributes_;
              bitField0_ = (bitField0_ & ~0x00000001);
              nodeAttributesBuilder_ = 
                org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                   getNodeAttributesFieldBuilder() : null;
            } else {
              nodeAttributesBuilder_.addAllMessages(other.nodeAttributes_);
            }
          }
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        for (int i = 0; i < getNodeAttributesCount(); i++) {
          if (!getNodeAttributes(i).isInitialized()) {
            return false;
          }
        }
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
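                // tag 10 = (field number 1 << 3) | wire type 2
                // (length-delimited): the payload is one serialized
                // NodeAttributeKeyProto for the repeated field.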
                org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto m =
                    input.readMessage(
                        org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto.PARSER,
                        extensionRegistry);
                if (nodeAttributesBuilder_ == null) {
                  ensureNodeAttributesIsMutable();
                  nodeAttributes_.add(m);
                } else {
                  nodeAttributesBuilder_.addMessage(m);
                }
                break;
              } // case 10
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
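      // Builder state for the repeated nodeAttributes field: bit 0x00000001 of
      // bitField0_ records whether nodeAttributes_ is a private mutable copy
      // (see ensureNodeAttributesIsMutable()). Once a nested builder is
      // requested, storage migrates from the plain list to
      // nodeAttributesBuilder_, and exactly one of the two is live.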
      private int bitField0_;

      private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto> nodeAttributes_ =
        java.util.Collections.emptyList();
      private void ensureNodeAttributesIsMutable() {
        if (!((bitField0_ & 0x00000001) != 0)) {
          nodeAttributes_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto>(nodeAttributes_);
          bitField0_ |= 0x00000001;
        }
      }

      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProtoOrBuilder> nodeAttributesBuilder_;

      /**
       * <code>repeated .hadoop.yarn.NodeAttributeKeyProto nodeAttributes = 1;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto> getNodeAttributesList() {
        if (nodeAttributesBuilder_ == null) {
          return java.util.Collections.unmodifiableList(nodeAttributes_);
        } else {
          return nodeAttributesBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.NodeAttributeKeyProto nodeAttributes = 1;</code>
       */
      public int getNodeAttributesCount() {
        if (nodeAttributesBuilder_ == null) {
          return nodeAttributes_.size();
        } else {
          return nodeAttributesBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.NodeAttributeKeyProto nodeAttributes = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto getNodeAttributes(int index) {
        if (nodeAttributesBuilder_ == null) {
          return nodeAttributes_.get(index);
        } else {
          return nodeAttributesBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.NodeAttributeKeyProto nodeAttributes = 1;</code>
       */
      public Builder setNodeAttributes(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto value) {
        if (nodeAttributesBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureNodeAttributesIsMutable();
          nodeAttributes_.set(index, value);
          onChanged();
        } else {
          nodeAttributesBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeAttributeKeyProto nodeAttributes = 1;</code>
       */
      public Builder setNodeAttributes(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto.Builder builderForValue) {
        if (nodeAttributesBuilder_ == null) {
          ensureNodeAttributesIsMutable();
          nodeAttributes_.set(index, builderForValue.build());
          onChanged();
        } else {
          nodeAttributesBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeAttributeKeyProto nodeAttributes = 1;</code>
       */
      public Builder addNodeAttributes(org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto value) {
        if (nodeAttributesBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureNodeAttributesIsMutable();
          nodeAttributes_.add(value);
          onChanged();
        } else {
          nodeAttributesBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeAttributeKeyProto nodeAttributes = 1;</code>
       */
      public Builder addNodeAttributes(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto value) {
        if (nodeAttributesBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureNodeAttributesIsMutable();
          nodeAttributes_.add(index, value);
          onChanged();
        } else {
          nodeAttributesBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeAttributeKeyProto nodeAttributes = 1;</code>
       */
      public Builder addNodeAttributes(
          org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto.Builder builderForValue) {
        if (nodeAttributesBuilder_ == null) {
          ensureNodeAttributesIsMutable();
          nodeAttributes_.add(builderForValue.build());
          onChanged();
        } else {
          nodeAttributesBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeAttributeKeyProto nodeAttributes = 1;</code>
       */
      public Builder addNodeAttributes(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto.Builder builderForValue) {
        if (nodeAttributesBuilder_ == null) {
          ensureNodeAttributesIsMutable();
          nodeAttributes_.add(index, builderForValue.build());
          onChanged();
        } else {
          nodeAttributesBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeAttributeKeyProto nodeAttributes = 1;</code>
       */
      public Builder addAllNodeAttributes(
          java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto> values) {
        if (nodeAttributesBuilder_ == null) {
          ensureNodeAttributesIsMutable();
          org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
              values, nodeAttributes_);
          onChanged();
        } else {
          nodeAttributesBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeAttributeKeyProto nodeAttributes = 1;</code>
       */
      public Builder clearNodeAttributes() {
        if (nodeAttributesBuilder_ == null) {
          nodeAttributes_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000001);
          onChanged();
        } else {
          nodeAttributesBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeAttributeKeyProto nodeAttributes = 1;</code>
       */
      public Builder removeNodeAttributes(int index) {
        if (nodeAttributesBuilder_ == null) {
          ensureNodeAttributesIsMutable();
          nodeAttributes_.remove(index);
          onChanged();
        } else {
          nodeAttributesBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeAttributeKeyProto nodeAttributes = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto.Builder getNodeAttributesBuilder(
          int index) {
        return getNodeAttributesFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .hadoop.yarn.NodeAttributeKeyProto nodeAttributes = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProtoOrBuilder getNodeAttributesOrBuilder(
          int index) {
        if (nodeAttributesBuilder_ == null) {
          return nodeAttributes_.get(index);
        } else {
          return nodeAttributesBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.NodeAttributeKeyProto nodeAttributes = 1;</code>
       */
      public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProtoOrBuilder> 
           getNodeAttributesOrBuilderList() {
        if (nodeAttributesBuilder_ != null) {
          return nodeAttributesBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(nodeAttributes_);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.NodeAttributeKeyProto nodeAttributes = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto.Builder addNodeAttributesBuilder() {
        return getNodeAttributesFieldBuilder().addBuilder(
            org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.NodeAttributeKeyProto nodeAttributes = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto.Builder addNodeAttributesBuilder(
          int index) {
        return getNodeAttributesFieldBuilder().addBuilder(
            index, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.NodeAttributeKeyProto nodeAttributes = 1;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto.Builder> 
           getNodeAttributesBuilderList() {
        return getNodeAttributesFieldBuilder().getBuilderList();
      }
      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProtoOrBuilder> 
          getNodeAttributesFieldBuilder() {
        if (nodeAttributesBuilder_ == null) {
          nodeAttributesBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProtoOrBuilder>(
                  nodeAttributes_,
                  ((bitField0_ & 0x00000001) != 0),
                  getParentForChildren(),
                  isClean());
          nodeAttributes_ = null;
        }
        return nodeAttributesBuilder_;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.GetAttributesToNodesRequestProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.GetAttributesToNodesRequestProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesRequestProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesRequestProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesRequestProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<GetAttributesToNodesRequestProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<GetAttributesToNodesRequestProto>() {
      @java.lang.Override
      public GetAttributesToNodesRequestProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<GetAttributesToNodesRequestProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<GetAttributesToNodesRequestProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesRequestProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
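
  // Illustrative usage sketch (hand-written; not protoc output): building a
  // request via the generated builder. The NodeAttributeKeyProto setter names
  // (setAttributePrefix / setAttributeName) are assumed from YarnProtos and
  // are shown only for illustration.
  //
  //   YarnServiceProtos.GetAttributesToNodesRequestProto request =
  //       YarnServiceProtos.GetAttributesToNodesRequestProto.newBuilder()
  //           .addNodeAttributes(
  //               YarnProtos.NodeAttributeKeyProto.newBuilder()
  //                   .setAttributePrefix("nm.yarn.io")
  //                   .setAttributeName("hostname"))
  //           .build();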

  public interface GetAttributesToNodesResponseProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.GetAttributesToNodesResponseProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>repeated .hadoop.yarn.AttributeToNodesProto attributesToNodes = 1;</code>
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto> 
        getAttributesToNodesList();
    /**
     * <code>repeated .hadoop.yarn.AttributeToNodesProto attributesToNodes = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto getAttributesToNodes(int index);
    /**
     * <code>repeated .hadoop.yarn.AttributeToNodesProto attributesToNodes = 1;</code>
     */
    int getAttributesToNodesCount();
    /**
     * <code>repeated .hadoop.yarn.AttributeToNodesProto attributesToNodes = 1;</code>
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProtoOrBuilder> 
        getAttributesToNodesOrBuilderList();
    /**
     * <code>repeated .hadoop.yarn.AttributeToNodesProto attributesToNodes = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProtoOrBuilder getAttributesToNodesOrBuilder(
        int index);
  }
  /**
   * Protobuf type {@code hadoop.yarn.GetAttributesToNodesResponseProto}
   */
  public static final class GetAttributesToNodesResponseProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.GetAttributesToNodesResponseProto)
      GetAttributesToNodesResponseProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use GetAttributesToNodesResponseProto.newBuilder() to construct.
    private GetAttributesToNodesResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private GetAttributesToNodesResponseProto() {
      attributesToNodes_ = java.util.Collections.emptyList();
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new GetAttributesToNodesResponseProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetAttributesToNodesResponseProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetAttributesToNodesResponseProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesResponseProto.Builder.class);
    }

    public static final int ATTRIBUTESTONODES_FIELD_NUMBER = 1;
    @SuppressWarnings("serial")
    private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto> attributesToNodes_;
    /**
     * <code>repeated .hadoop.yarn.AttributeToNodesProto attributesToNodes = 1;</code>
     */
    @java.lang.Override
    public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto> getAttributesToNodesList() {
      return attributesToNodes_;
    }
    /**
     * <code>repeated .hadoop.yarn.AttributeToNodesProto attributesToNodes = 1;</code>
     */
    @java.lang.Override
    public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProtoOrBuilder> 
        getAttributesToNodesOrBuilderList() {
      return attributesToNodes_;
    }
    /**
     * <code>repeated .hadoop.yarn.AttributeToNodesProto attributesToNodes = 1;</code>
     */
    @java.lang.Override
    public int getAttributesToNodesCount() {
      return attributesToNodes_.size();
    }
    /**
     * <code>repeated .hadoop.yarn.AttributeToNodesProto attributesToNodes = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto getAttributesToNodes(int index) {
      return attributesToNodes_.get(index);
    }
    /**
     * <code>repeated .hadoop.yarn.AttributeToNodesProto attributesToNodes = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProtoOrBuilder getAttributesToNodesOrBuilder(
        int index) {
      return attributesToNodes_.get(index);
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      for (int i = 0; i < getAttributesToNodesCount(); i++) {
        if (!getAttributesToNodes(i).isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      for (int i = 0; i < attributesToNodes_.size(); i++) {
        output.writeMessage(1, attributesToNodes_.get(i));
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      for (int i = 0; i < attributesToNodes_.size(); i++) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, attributesToNodes_.get(i));
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesResponseProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesResponseProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesResponseProto) obj;

      if (!getAttributesToNodesList()
          .equals(other.getAttributesToNodesList())) return false;
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (getAttributesToNodesCount() > 0) {
        hash = (37 * hash) + ATTRIBUTESTONODES_FIELD_NUMBER;
        hash = (53 * hash) + getAttributesToNodesList().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesResponseProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesResponseProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesResponseProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesResponseProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesResponseProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesResponseProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesResponseProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesResponseProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesResponseProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.GetAttributesToNodesResponseProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.GetAttributesToNodesResponseProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesResponseProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetAttributesToNodesResponseProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetAttributesToNodesResponseProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesResponseProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesResponseProto.newBuilder()
      private Builder() {

      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);

      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        if (attributesToNodesBuilder_ == null) {
          attributesToNodes_ = java.util.Collections.emptyList();
        } else {
          attributesToNodes_ = null;
          attributesToNodesBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetAttributesToNodesResponseProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesResponseProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesResponseProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesResponseProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesResponseProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesResponseProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesResponseProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesResponseProto(this);
        buildPartialRepeatedFields(result);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartialRepeatedFields(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesResponseProto result) {
        if (attributesToNodesBuilder_ == null) {
          if (((bitField0_ & 0x00000001) != 0)) {
            attributesToNodes_ = java.util.Collections.unmodifiableList(attributesToNodes_);
            bitField0_ = (bitField0_ & ~0x00000001);
          }
          result.attributesToNodes_ = attributesToNodes_;
        } else {
          result.attributesToNodes_ = attributesToNodesBuilder_.build();
        }
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesResponseProto result) {
        // No singular fields to copy; the repeated attributesToNodes field is
        // materialized in buildPartialRepeatedFields(result).
        int from_bitField0_ = bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesResponseProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesResponseProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesResponseProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesResponseProto.getDefaultInstance()) return this;
        if (attributesToNodesBuilder_ == null) {
          if (!other.attributesToNodes_.isEmpty()) {
            if (attributesToNodes_.isEmpty()) {
              attributesToNodes_ = other.attributesToNodes_;
              bitField0_ = (bitField0_ & ~0x00000001);
            } else {
              ensureAttributesToNodesIsMutable();
              attributesToNodes_.addAll(other.attributesToNodes_);
            }
            onChanged();
          }
        } else {
          if (!other.attributesToNodes_.isEmpty()) {
            if (attributesToNodesBuilder_.isEmpty()) {
              attributesToNodesBuilder_.dispose();
              attributesToNodesBuilder_ = null;
              attributesToNodes_ = other.attributesToNodes_;
              bitField0_ = (bitField0_ & ~0x00000001);
              attributesToNodesBuilder_ = 
                org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                   getAttributesToNodesFieldBuilder() : null;
            } else {
              attributesToNodesBuilder_.addAllMessages(other.attributesToNodes_);
            }
          }
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        for (int i = 0; i < getAttributesToNodesCount(); i++) {
          if (!getAttributesToNodes(i).isInitialized()) {
            return false;
          }
        }
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto m =
                    input.readMessage(
                        org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto.PARSER,
                        extensionRegistry);
                if (attributesToNodesBuilder_ == null) {
                  ensureAttributesToNodesIsMutable();
                  attributesToNodes_.add(m);
                } else {
                  attributesToNodesBuilder_.addMessage(m);
                }
                break;
              } // case 10
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

      private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto> attributesToNodes_ =
        java.util.Collections.emptyList();
      private void ensureAttributesToNodesIsMutable() {
        if (!((bitField0_ & 0x00000001) != 0)) {
          attributesToNodes_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto>(attributesToNodes_);
          bitField0_ |= 0x00000001;
        }
      }

      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto, org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProtoOrBuilder> attributesToNodesBuilder_;

      /**
       * <code>repeated .hadoop.yarn.AttributeToNodesProto attributesToNodes = 1;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto> getAttributesToNodesList() {
        if (attributesToNodesBuilder_ == null) {
          return java.util.Collections.unmodifiableList(attributesToNodes_);
        } else {
          return attributesToNodesBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.AttributeToNodesProto attributesToNodes = 1;</code>
       */
      public int getAttributesToNodesCount() {
        if (attributesToNodesBuilder_ == null) {
          return attributesToNodes_.size();
        } else {
          return attributesToNodesBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.AttributeToNodesProto attributesToNodes = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto getAttributesToNodes(int index) {
        if (attributesToNodesBuilder_ == null) {
          return attributesToNodes_.get(index);
        } else {
          return attributesToNodesBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.AttributeToNodesProto attributesToNodes = 1;</code>
       */
      public Builder setAttributesToNodes(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto value) {
        if (attributesToNodesBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureAttributesToNodesIsMutable();
          attributesToNodes_.set(index, value);
          onChanged();
        } else {
          attributesToNodesBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.AttributeToNodesProto attributesToNodes = 1;</code>
       */
      public Builder setAttributesToNodes(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto.Builder builderForValue) {
        if (attributesToNodesBuilder_ == null) {
          ensureAttributesToNodesIsMutable();
          attributesToNodes_.set(index, builderForValue.build());
          onChanged();
        } else {
          attributesToNodesBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.AttributeToNodesProto attributesToNodes = 1;</code>
       */
      public Builder addAttributesToNodes(org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto value) {
        if (attributesToNodesBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureAttributesToNodesIsMutable();
          attributesToNodes_.add(value);
          onChanged();
        } else {
          attributesToNodesBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.AttributeToNodesProto attributesToNodes = 1;</code>
       */
      public Builder addAttributesToNodes(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto value) {
        if (attributesToNodesBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureAttributesToNodesIsMutable();
          attributesToNodes_.add(index, value);
          onChanged();
        } else {
          attributesToNodesBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.AttributeToNodesProto attributesToNodes = 1;</code>
       */
      public Builder addAttributesToNodes(
          org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto.Builder builderForValue) {
        if (attributesToNodesBuilder_ == null) {
          ensureAttributesToNodesIsMutable();
          attributesToNodes_.add(builderForValue.build());
          onChanged();
        } else {
          attributesToNodesBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.AttributeToNodesProto attributesToNodes = 1;</code>
       */
      public Builder addAttributesToNodes(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto.Builder builderForValue) {
        if (attributesToNodesBuilder_ == null) {
          ensureAttributesToNodesIsMutable();
          attributesToNodes_.add(index, builderForValue.build());
          onChanged();
        } else {
          attributesToNodesBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.AttributeToNodesProto attributesToNodes = 1;</code>
       */
      public Builder addAllAttributesToNodes(
          java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto> values) {
        if (attributesToNodesBuilder_ == null) {
          ensureAttributesToNodesIsMutable();
          org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
              values, attributesToNodes_);
          onChanged();
        } else {
          attributesToNodesBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.AttributeToNodesProto attributesToNodes = 1;</code>
       */
      public Builder clearAttributesToNodes() {
        if (attributesToNodesBuilder_ == null) {
          attributesToNodes_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000001);
          onChanged();
        } else {
          attributesToNodesBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.AttributeToNodesProto attributesToNodes = 1;</code>
       */
      public Builder removeAttributesToNodes(int index) {
        if (attributesToNodesBuilder_ == null) {
          ensureAttributesToNodesIsMutable();
          attributesToNodes_.remove(index);
          onChanged();
        } else {
          attributesToNodesBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.AttributeToNodesProto attributesToNodes = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto.Builder getAttributesToNodesBuilder(
          int index) {
        return getAttributesToNodesFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .hadoop.yarn.AttributeToNodesProto attributesToNodes = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProtoOrBuilder getAttributesToNodesOrBuilder(
          int index) {
        if (attributesToNodesBuilder_ == null) {
          return attributesToNodes_.get(index);
        } else {
          return attributesToNodesBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.AttributeToNodesProto attributesToNodes = 1;</code>
       */
      public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProtoOrBuilder> 
           getAttributesToNodesOrBuilderList() {
        if (attributesToNodesBuilder_ != null) {
          return attributesToNodesBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(attributesToNodes_);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.AttributeToNodesProto attributesToNodes = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto.Builder addAttributesToNodesBuilder() {
        return getAttributesToNodesFieldBuilder().addBuilder(
            org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.AttributeToNodesProto attributesToNodes = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto.Builder addAttributesToNodesBuilder(
          int index) {
        return getAttributesToNodesFieldBuilder().addBuilder(
            index, org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.AttributeToNodesProto attributesToNodes = 1;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto.Builder> 
           getAttributesToNodesBuilderList() {
        return getAttributesToNodesFieldBuilder().getBuilderList();
      }
      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto, org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProtoOrBuilder> 
          getAttributesToNodesFieldBuilder() {
        if (attributesToNodesBuilder_ == null) {
          attributesToNodesBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto, org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProtoOrBuilder>(
                  attributesToNodes_,
                  ((bitField0_ & 0x00000001) != 0),
                  getParentForChildren(),
                  isClean());
          attributesToNodes_ = null;
        }
        return attributesToNodesBuilder_;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.GetAttributesToNodesResponseProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.GetAttributesToNodesResponseProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesResponseProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesResponseProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesResponseProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<GetAttributesToNodesResponseProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<GetAttributesToNodesResponseProto>() {
      @java.lang.Override
      public GetAttributesToNodesResponseProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<GetAttributesToNodesResponseProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<GetAttributesToNodesResponseProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesResponseProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }

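  // Hand-written illustrative sketch, not protoc output: a minimal build /
  // serialize / re-parse round trip for GetAttributesToNodesResponseProto.
  // The method name and its parameter are assumptions for this example only.
  private static GetAttributesToNodesResponseProto exampleAttributesToNodesRoundTrip(
      java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto> mappings)
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    GetAttributesToNodesResponseProto response =
        GetAttributesToNodesResponseProto.newBuilder()
            .addAllAttributesToNodes(mappings)  // repeated field 1
            .build();
    byte[] wire = response.toByteArray();       // serialize to wire format
    return GetAttributesToNodesResponseProto.parseFrom(wire);  // parse it back
  }
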
  public interface GetNodesToAttributesRequestProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.GetNodesToAttributesRequestProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>repeated string hostnames = 1;</code>
     * @return A list containing the hostnames.
     */
    java.util.List<java.lang.String>
        getHostnamesList();
    /**
     * <code>repeated string hostnames = 1;</code>
     * @return The count of hostnames.
     */
    int getHostnamesCount();
    /**
     * <code>repeated string hostnames = 1;</code>
     * @param index The index of the element to return.
     * @return The hostnames at the given index.
     */
    java.lang.String getHostnames(int index);
    /**
     * <code>repeated string hostnames = 1;</code>
     * @param index The index of the value to return.
     * @return The bytes of the hostnames at the given index.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getHostnamesBytes(int index);
  }
  /**
   * Protobuf type {@code hadoop.yarn.GetNodesToAttributesRequestProto}
   */
  public static final class GetNodesToAttributesRequestProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.GetNodesToAttributesRequestProto)
      GetNodesToAttributesRequestProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use GetNodesToAttributesRequestProto.newBuilder() to construct.
    private GetNodesToAttributesRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private GetNodesToAttributesRequestProto() {
      hostnames_ =
          org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new GetNodesToAttributesRequestProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetNodesToAttributesRequestProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetNodesToAttributesRequestProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesRequestProto.Builder.class);
    }

    public static final int HOSTNAMES_FIELD_NUMBER = 1;
    @SuppressWarnings("serial")
    private org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList hostnames_ =
        org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
    /**
     * <code>repeated string hostnames = 1;</code>
     * @return A list containing the hostnames.
     */
    public org.apache.hadoop.thirdparty.protobuf.ProtocolStringList
        getHostnamesList() {
      return hostnames_;
    }
    /**
     * <code>repeated string hostnames = 1;</code>
     * @return The count of hostnames.
     */
    public int getHostnamesCount() {
      return hostnames_.size();
    }
    /**
     * <code>repeated string hostnames = 1;</code>
     * @param index The index of the element to return.
     * @return The hostnames at the given index.
     */
    public java.lang.String getHostnames(int index) {
      return hostnames_.get(index);
    }
    /**
     * <code>repeated string hostnames = 1;</code>
     * @param index The index of the value to return.
     * @return The bytes of the hostnames at the given index.
     */
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getHostnamesBytes(int index) {
      return hostnames_.getByteString(index);
    }

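    // memoizedIsInitialized: -1 = not yet computed, 0 = known uninitialized,
    // 1 = known initialized.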
    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      for (int i = 0; i < hostnames_.size(); i++) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, hostnames_.getRaw(i));
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      {
        int dataSize = 0;
        for (int i = 0; i < hostnames_.size(); i++) {
          dataSize += computeStringSizeNoTag(hostnames_.getRaw(i));
        }
        size += dataSize;
        size += 1 * getHostnamesList().size();
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesRequestProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesRequestProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesRequestProto) obj;

      if (!getHostnamesList()
          .equals(other.getHostnamesList())) return false;
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (getHostnamesCount() > 0) {
        hash = (37 * hash) + HOSTNAMES_FIELD_NUMBER;
        hash = (53 * hash) + getHostnamesList().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesRequestProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesRequestProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesRequestProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesRequestProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesRequestProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesRequestProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesRequestProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesRequestProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesRequestProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.GetNodesToAttributesRequestProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.GetNodesToAttributesRequestProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesRequestProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetNodesToAttributesRequestProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetNodesToAttributesRequestProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesRequestProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesRequestProto.newBuilder()
      private Builder() {

      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);

      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        hostnames_ =
            org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetNodesToAttributesRequestProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesRequestProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesRequestProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesRequestProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesRequestProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesRequestProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesRequestProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesRequestProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesRequestProto result) {
        int from_bitField0_ = bitField0_;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          hostnames_.makeImmutable();
          result.hostnames_ = hostnames_;
        }
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesRequestProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesRequestProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesRequestProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesRequestProto.getDefaultInstance()) return this;
        if (!other.hostnames_.isEmpty()) {
          if (hostnames_.isEmpty()) {
            hostnames_ = other.hostnames_;
            bitField0_ |= 0x00000001;
          } else {
            ensureHostnamesIsMutable();
            hostnames_.addAll(other.hostnames_);
          }
          onChanged();
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
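              // tag 10 = (field number 1 << 3) | wire type 2 (length-delimited)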
              case 10: {
                org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes();
                ensureHostnamesIsMutable();
                hostnames_.add(bs);
                break;
              } // case 10
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

      private org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList hostnames_ =
          org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
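      // Copy-on-write: hostnames_ starts as a shared immutable empty list and
      // is replaced with a mutable copy on first modification.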
      private void ensureHostnamesIsMutable() {
        if (!hostnames_.isModifiable()) {
          hostnames_ = new org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList(hostnames_);
        }
        bitField0_ |= 0x00000001;
      }
      /**
       * <code>repeated string hostnames = 1;</code>
       * @return A list containing the hostnames.
       */
      public org.apache.hadoop.thirdparty.protobuf.ProtocolStringList
          getHostnamesList() {
        hostnames_.makeImmutable();
        return hostnames_;
      }
      /**
       * <code>repeated string hostnames = 1;</code>
       * @return The count of hostnames.
       */
      public int getHostnamesCount() {
        return hostnames_.size();
      }
      /**
       * <code>repeated string hostnames = 1;</code>
       * @param index The index of the element to return.
       * @return The hostnames at the given index.
       */
      public java.lang.String getHostnames(int index) {
        return hostnames_.get(index);
      }
      /**
       * <code>repeated string hostnames = 1;</code>
       * @param index The index of the value to return.
       * @return The bytes of the hostnames at the given index.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getHostnamesBytes(int index) {
        return hostnames_.getByteString(index);
      }
      /**
       * <code>repeated string hostnames = 1;</code>
       * @param index The index to set the value at.
       * @param value The hostnames to set.
       * @return This builder for chaining.
       */
      public Builder setHostnames(
          int index, java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        ensureHostnamesIsMutable();
        hostnames_.set(index, value);
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>repeated string hostnames = 1;</code>
       * @param value The hostnames to add.
       * @return This builder for chaining.
       */
      public Builder addHostnames(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        ensureHostnamesIsMutable();
        hostnames_.add(value);
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>repeated string hostnames = 1;</code>
       * @param values The hostnames to add.
       * @return This builder for chaining.
       */
      public Builder addAllHostnames(
          java.lang.Iterable<java.lang.String> values) {
        ensureHostnamesIsMutable();
        org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
            values, hostnames_);
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>repeated string hostnames = 1;</code>
       * @return This builder for chaining.
       */
      public Builder clearHostnames() {
        hostnames_ =
          org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
        return this;
      }
      /**
       * <code>repeated string hostnames = 1;</code>
       * @param value The bytes of the hostnames to add.
       * @return This builder for chaining.
       */
      public Builder addHostnamesBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        ensureHostnamesIsMutable();
        hostnames_.add(value);
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.GetNodesToAttributesRequestProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.GetNodesToAttributesRequestProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesRequestProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesRequestProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesRequestProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<GetNodesToAttributesRequestProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<GetNodesToAttributesRequestProto>() {
      @java.lang.Override
      public GetNodesToAttributesRequestProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<GetNodesToAttributesRequestProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<GetNodesToAttributesRequestProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesRequestProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }

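  // Hand-written illustrative sketch, not protoc output: building a
  // GetNodesToAttributesRequestProto for two hosts. The hostnames are
  // placeholders invented for this example.
  private static GetNodesToAttributesRequestProto exampleNodesToAttributesRequest() {
    return GetNodesToAttributesRequestProto.newBuilder()
        .addHostnames("host-1.example.com")  // repeated string field 1
        .addHostnames("host-2.example.com")
        .build();  // no required fields, so build() cannot fail initialization
  }
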
  public interface GetNodesToAttributesResponseProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.GetNodesToAttributesResponseProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>repeated .hadoop.yarn.NodeToAttributesProto nodesToAttributes = 1;</code>
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto> 
        getNodesToAttributesList();
    /**
     * <code>repeated .hadoop.yarn.NodeToAttributesProto nodesToAttributes = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto getNodesToAttributes(int index);
    /**
     * <code>repeated .hadoop.yarn.NodeToAttributesProto nodesToAttributes = 1;</code>
     */
    int getNodesToAttributesCount();
    /**
     * <code>repeated .hadoop.yarn.NodeToAttributesProto nodesToAttributes = 1;</code>
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProtoOrBuilder> 
        getNodesToAttributesOrBuilderList();
    /**
     * <code>repeated .hadoop.yarn.NodeToAttributesProto nodesToAttributes = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProtoOrBuilder getNodesToAttributesOrBuilder(
        int index);
  }
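
  // Hand-written illustrative sketch, not protoc output: consuming a response
  // through the read-only OrBuilder view above, plus the generated
  // length-delimited stream helper. Both method names here are assumptions.
  private static boolean exampleHasNodeMappings(
      GetNodesToAttributesResponseProtoOrBuilder response) {
    // Works identically on a built message or an in-progress Builder.
    return response.getNodesToAttributesCount() > 0;
  }
  private static GetNodesToAttributesResponseProto exampleReadDelimitedResponse(
      java.io.InputStream in) throws java.io.IOException {
    // parseDelimitedFrom reads one length-prefixed message and returns null
    // at end of stream.
    return GetNodesToAttributesResponseProto.parseDelimitedFrom(in);
  }
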
  /**
   * Protobuf type {@code hadoop.yarn.GetNodesToAttributesResponseProto}
   */
  public static final class GetNodesToAttributesResponseProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.GetNodesToAttributesResponseProto)
      GetNodesToAttributesResponseProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use GetNodesToAttributesResponseProto.newBuilder() to construct.
    private GetNodesToAttributesResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private GetNodesToAttributesResponseProto() {
      nodesToAttributes_ = java.util.Collections.emptyList();
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new GetNodesToAttributesResponseProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetNodesToAttributesResponseProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetNodesToAttributesResponseProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesResponseProto.Builder.class);
    }

    public static final int NODESTOATTRIBUTES_FIELD_NUMBER = 1;
    @SuppressWarnings("serial")
    private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto> nodesToAttributes_;
    /**
     * <code>repeated .hadoop.yarn.NodeToAttributesProto nodesToAttributes = 1;</code>
     */
    @java.lang.Override
    public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto> getNodesToAttributesList() {
      return nodesToAttributes_;
    }
    /**
     * <code>repeated .hadoop.yarn.NodeToAttributesProto nodesToAttributes = 1;</code>
     */
    @java.lang.Override
    public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProtoOrBuilder> 
        getNodesToAttributesOrBuilderList() {
      return nodesToAttributes_;
    }
    /**
     * <code>repeated .hadoop.yarn.NodeToAttributesProto nodesToAttributes = 1;</code>
     */
    @java.lang.Override
    public int getNodesToAttributesCount() {
      return nodesToAttributes_.size();
    }
    /**
     * <code>repeated .hadoop.yarn.NodeToAttributesProto nodesToAttributes = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto getNodesToAttributes(int index) {
      return nodesToAttributes_.get(index);
    }
    /**
     * <code>repeated .hadoop.yarn.NodeToAttributesProto nodesToAttributes = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProtoOrBuilder getNodesToAttributesOrBuilder(
        int index) {
      return nodesToAttributes_.get(index);
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      for (int i = 0; i < getNodesToAttributesCount(); i++) {
        if (!getNodesToAttributes(i).isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      for (int i = 0; i < nodesToAttributes_.size(); i++) {
        output.writeMessage(1, nodesToAttributes_.get(i));
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      for (int i = 0; i < nodesToAttributes_.size(); i++) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, nodesToAttributes_.get(i));
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesResponseProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesResponseProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesResponseProto) obj;

      if (!getNodesToAttributesList()
          .equals(other.getNodesToAttributesList())) return false;
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (getNodesToAttributesCount() > 0) {
        hash = (37 * hash) + NODESTOATTRIBUTES_FIELD_NUMBER;
        hash = (53 * hash) + getNodesToAttributesList().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesResponseProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesResponseProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesResponseProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesResponseProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesResponseProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesResponseProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesResponseProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesResponseProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesResponseProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.GetNodesToAttributesResponseProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.GetNodesToAttributesResponseProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesResponseProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetNodesToAttributesResponseProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetNodesToAttributesResponseProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesResponseProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesResponseProto.newBuilder()
      private Builder() {

      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);

      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        if (nodesToAttributesBuilder_ == null) {
          nodesToAttributes_ = java.util.Collections.emptyList();
        } else {
          nodesToAttributes_ = null;
          nodesToAttributesBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetNodesToAttributesResponseProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesResponseProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesResponseProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesResponseProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesResponseProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesResponseProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesResponseProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesResponseProto(this);
        buildPartialRepeatedFields(result);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartialRepeatedFields(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesResponseProto result) {
        if (nodesToAttributesBuilder_ == null) {
          if (((bitField0_ & 0x00000001) != 0)) {
            nodesToAttributes_ = java.util.Collections.unmodifiableList(nodesToAttributes_);
            bitField0_ = (bitField0_ & ~0x00000001);
          }
          result.nodesToAttributes_ = nodesToAttributes_;
        } else {
          result.nodesToAttributes_ = nodesToAttributesBuilder_.build();
        }
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesResponseProto result) {
        // No singular fields to populate here; the repeated field is copied in
        // buildPartialRepeatedFields above.
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesResponseProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesResponseProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesResponseProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesResponseProto.getDefaultInstance()) return this;
        if (nodesToAttributesBuilder_ == null) {
          if (!other.nodesToAttributes_.isEmpty()) {
            if (nodesToAttributes_.isEmpty()) {
              nodesToAttributes_ = other.nodesToAttributes_;
              bitField0_ = (bitField0_ & ~0x00000001);
            } else {
              ensureNodesToAttributesIsMutable();
              nodesToAttributes_.addAll(other.nodesToAttributes_);
            }
            onChanged();
          }
        } else {
          if (!other.nodesToAttributes_.isEmpty()) {
            if (nodesToAttributesBuilder_.isEmpty()) {
              nodesToAttributesBuilder_.dispose();
              nodesToAttributesBuilder_ = null;
              nodesToAttributes_ = other.nodesToAttributes_;
              bitField0_ = (bitField0_ & ~0x00000001);
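              // alwaysUseFieldBuilders is false in production; protobuf turns
              // it on in tests to force the field-builder code path.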
              nodesToAttributesBuilder_ = 
                org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                   getNodesToAttributesFieldBuilder() : null;
            } else {
              nodesToAttributesBuilder_.addAllMessages(other.nodesToAttributes_);
            }
          }
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        for (int i = 0; i < getNodesToAttributesCount(); i++) {
          if (!getNodesToAttributes(i).isInitialized()) {
            return false;
          }
        }
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto m =
                    input.readMessage(
                        org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto.PARSER,
                        extensionRegistry);
                if (nodesToAttributesBuilder_ == null) {
                  ensureNodesToAttributesIsMutable();
                  nodesToAttributes_.add(m);
                } else {
                  nodesToAttributesBuilder_.addMessage(m);
                }
                break;
              } // case 10
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

      private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto> nodesToAttributes_ =
        java.util.Collections.emptyList();
      private void ensureNodesToAttributesIsMutable() {
        if (!((bitField0_ & 0x00000001) != 0)) {
          nodesToAttributes_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto>(nodesToAttributes_);
          bitField0_ |= 0x00000001;
        }
      }

      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProtoOrBuilder> nodesToAttributesBuilder_;

      /**
       * <code>repeated .hadoop.yarn.NodeToAttributesProto nodesToAttributes = 1;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto> getNodesToAttributesList() {
        if (nodesToAttributesBuilder_ == null) {
          return java.util.Collections.unmodifiableList(nodesToAttributes_);
        } else {
          return nodesToAttributesBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.NodeToAttributesProto nodesToAttributes = 1;</code>
       */
      public int getNodesToAttributesCount() {
        if (nodesToAttributesBuilder_ == null) {
          return nodesToAttributes_.size();
        } else {
          return nodesToAttributesBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.NodeToAttributesProto nodesToAttributes = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto getNodesToAttributes(int index) {
        if (nodesToAttributesBuilder_ == null) {
          return nodesToAttributes_.get(index);
        } else {
          return nodesToAttributesBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.NodeToAttributesProto nodesToAttributes = 1;</code>
       */
      public Builder setNodesToAttributes(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto value) {
        if (nodesToAttributesBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureNodesToAttributesIsMutable();
          nodesToAttributes_.set(index, value);
          onChanged();
        } else {
          nodesToAttributesBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeToAttributesProto nodesToAttributes = 1;</code>
       */
      public Builder setNodesToAttributes(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto.Builder builderForValue) {
        if (nodesToAttributesBuilder_ == null) {
          ensureNodesToAttributesIsMutable();
          nodesToAttributes_.set(index, builderForValue.build());
          onChanged();
        } else {
          nodesToAttributesBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeToAttributesProto nodesToAttributes = 1;</code>
       */
      public Builder addNodesToAttributes(org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto value) {
        if (nodesToAttributesBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureNodesToAttributesIsMutable();
          nodesToAttributes_.add(value);
          onChanged();
        } else {
          nodesToAttributesBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeToAttributesProto nodesToAttributes = 1;</code>
       */
      public Builder addNodesToAttributes(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto value) {
        if (nodesToAttributesBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureNodesToAttributesIsMutable();
          nodesToAttributes_.add(index, value);
          onChanged();
        } else {
          nodesToAttributesBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeToAttributesProto nodesToAttributes = 1;</code>
       */
      public Builder addNodesToAttributes(
          org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto.Builder builderForValue) {
        if (nodesToAttributesBuilder_ == null) {
          ensureNodesToAttributesIsMutable();
          nodesToAttributes_.add(builderForValue.build());
          onChanged();
        } else {
          nodesToAttributesBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeToAttributesProto nodesToAttributes = 1;</code>
       */
      public Builder addNodesToAttributes(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto.Builder builderForValue) {
        if (nodesToAttributesBuilder_ == null) {
          ensureNodesToAttributesIsMutable();
          nodesToAttributes_.add(index, builderForValue.build());
          onChanged();
        } else {
          nodesToAttributesBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeToAttributesProto nodesToAttributes = 1;</code>
       */
      public Builder addAllNodesToAttributes(
          java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto> values) {
        if (nodesToAttributesBuilder_ == null) {
          ensureNodesToAttributesIsMutable();
          org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
              values, nodesToAttributes_);
          onChanged();
        } else {
          nodesToAttributesBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeToAttributesProto nodesToAttributes = 1;</code>
       */
      public Builder clearNodesToAttributes() {
        if (nodesToAttributesBuilder_ == null) {
          nodesToAttributes_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000001);
          onChanged();
        } else {
          nodesToAttributesBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeToAttributesProto nodesToAttributes = 1;</code>
       */
      public Builder removeNodesToAttributes(int index) {
        if (nodesToAttributesBuilder_ == null) {
          ensureNodesToAttributesIsMutable();
          nodesToAttributes_.remove(index);
          onChanged();
        } else {
          nodesToAttributesBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeToAttributesProto nodesToAttributes = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto.Builder getNodesToAttributesBuilder(
          int index) {
        return getNodesToAttributesFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .hadoop.yarn.NodeToAttributesProto nodesToAttributes = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProtoOrBuilder getNodesToAttributesOrBuilder(
          int index) {
        if (nodesToAttributesBuilder_ == null) {
          return nodesToAttributes_.get(index);
        } else {
          return nodesToAttributesBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.NodeToAttributesProto nodesToAttributes = 1;</code>
       */
      public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProtoOrBuilder> 
           getNodesToAttributesOrBuilderList() {
        if (nodesToAttributesBuilder_ != null) {
          return nodesToAttributesBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(nodesToAttributes_);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.NodeToAttributesProto nodesToAttributes = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto.Builder addNodesToAttributesBuilder() {
        return getNodesToAttributesFieldBuilder().addBuilder(
            org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.NodeToAttributesProto nodesToAttributes = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto.Builder addNodesToAttributesBuilder(
          int index) {
        return getNodesToAttributesFieldBuilder().addBuilder(
            index, org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.NodeToAttributesProto nodesToAttributes = 1;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto.Builder> 
           getNodesToAttributesBuilderList() {
        return getNodesToAttributesFieldBuilder().getBuilderList();
      }
      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProtoOrBuilder> 
          getNodesToAttributesFieldBuilder() {
        if (nodesToAttributesBuilder_ == null) {
          nodesToAttributesBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProtoOrBuilder>(
                  nodesToAttributes_,
                  ((bitField0_ & 0x00000001) != 0),
                  getParentForChildren(),
                  isClean());
          nodesToAttributes_ = null;
        }
        return nodesToAttributesBuilder_;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.GetNodesToAttributesResponseProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.GetNodesToAttributesResponseProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesResponseProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesResponseProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesResponseProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<GetNodesToAttributesResponseProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<GetNodesToAttributesResponseProto>() {
      @java.lang.Override
      public GetNodesToAttributesResponseProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<GetNodesToAttributesResponseProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<GetNodesToAttributesResponseProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesResponseProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
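
  /*
   * Illustrative usage sketch (not part of the generated output): building a
   * GetNodesToAttributesResponseProto with the generated builder above and
   * round-tripping it through the wire format. getDefaultInstance() stands in
   * for a populated NodeToAttributesProto.
   *
   *   YarnServiceProtos.GetNodesToAttributesResponseProto response =
   *       YarnServiceProtos.GetNodesToAttributesResponseProto.newBuilder()
   *           .addNodesToAttributes(
   *               YarnProtos.NodeToAttributesProto.getDefaultInstance())
   *           .build();
   *   byte[] bytes = response.toByteArray();
   *   YarnServiceProtos.GetNodesToAttributesResponseProto reparsed =
   *       YarnServiceProtos.GetNodesToAttributesResponseProto.parseFrom(bytes);
   */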

  public interface UpdateApplicationPriorityRequestProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.UpdateApplicationPriorityRequestProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>required .hadoop.yarn.ApplicationIdProto applicationId = 1;</code>
     * @return Whether the applicationId field is set.
     */
    boolean hasApplicationId();
    /**
     * <code>required .hadoop.yarn.ApplicationIdProto applicationId = 1;</code>
     * @return The applicationId.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getApplicationId();
    /**
     * <code>required .hadoop.yarn.ApplicationIdProto applicationId = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getApplicationIdOrBuilder();

    /**
     * <code>required .hadoop.yarn.PriorityProto applicationPriority = 2;</code>
     * @return Whether the applicationPriority field is set.
     */
    boolean hasApplicationPriority();
    /**
     * <code>required .hadoop.yarn.PriorityProto applicationPriority = 2;</code>
     * @return The applicationPriority.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto getApplicationPriority();
    /**
     * <code>required .hadoop.yarn.PriorityProto applicationPriority = 2;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder getApplicationPriorityOrBuilder();
  }
  /**
   * Protobuf type {@code hadoop.yarn.UpdateApplicationPriorityRequestProto}
   */
  public static final class UpdateApplicationPriorityRequestProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.UpdateApplicationPriorityRequestProto)
      UpdateApplicationPriorityRequestProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use UpdateApplicationPriorityRequestProto.newBuilder() to construct.
    private UpdateApplicationPriorityRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private UpdateApplicationPriorityRequestProto() {
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new UpdateApplicationPriorityRequestProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_UpdateApplicationPriorityRequestProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_UpdateApplicationPriorityRequestProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityRequestProto.Builder.class);
    }

    private int bitField0_;
    public static final int APPLICATIONID_FIELD_NUMBER = 1;
    private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto applicationId_;
    /**
     * <code>required .hadoop.yarn.ApplicationIdProto applicationId = 1;</code>
     * @return Whether the applicationId field is set.
     */
    @java.lang.Override
    public boolean hasApplicationId() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>required .hadoop.yarn.ApplicationIdProto applicationId = 1;</code>
     * @return The applicationId.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getApplicationId() {
      return applicationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_;
    }
    /**
     * <code>required .hadoop.yarn.ApplicationIdProto applicationId = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getApplicationIdOrBuilder() {
      return applicationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_;
    }

    public static final int APPLICATIONPRIORITY_FIELD_NUMBER = 2;
    private org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto applicationPriority_;
    /**
     * <code>required .hadoop.yarn.PriorityProto applicationPriority = 2;</code>
     * @return Whether the applicationPriority field is set.
     */
    @java.lang.Override
    public boolean hasApplicationPriority() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <code>required .hadoop.yarn.PriorityProto applicationPriority = 2;</code>
     * @return The applicationPriority.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto getApplicationPriority() {
      return applicationPriority_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance() : applicationPriority_;
    }
    /**
     * <code>required .hadoop.yarn.PriorityProto applicationPriority = 2;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder getApplicationPriorityOrBuilder() {
      return applicationPriority_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance() : applicationPriority_;
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      if (!hasApplicationId()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasApplicationPriority()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeMessage(1, getApplicationId());
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        output.writeMessage(2, getApplicationPriority());
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, getApplicationId());
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(2, getApplicationPriority());
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityRequestProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityRequestProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityRequestProto) obj;

      if (hasApplicationId() != other.hasApplicationId()) return false;
      if (hasApplicationId()) {
        if (!getApplicationId()
            .equals(other.getApplicationId())) return false;
      }
      if (hasApplicationPriority() != other.hasApplicationPriority()) return false;
      if (hasApplicationPriority()) {
        if (!getApplicationPriority()
            .equals(other.getApplicationPriority())) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasApplicationId()) {
        hash = (37 * hash) + APPLICATIONID_FIELD_NUMBER;
        hash = (53 * hash) + getApplicationId().hashCode();
      }
      if (hasApplicationPriority()) {
        hash = (37 * hash) + APPLICATIONPRIORITY_FIELD_NUMBER;
        hash = (53 * hash) + getApplicationPriority().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityRequestProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityRequestProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityRequestProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityRequestProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityRequestProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityRequestProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityRequestProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityRequestProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityRequestProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.UpdateApplicationPriorityRequestProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.UpdateApplicationPriorityRequestProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityRequestProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_UpdateApplicationPriorityRequestProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_UpdateApplicationPriorityRequestProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityRequestProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityRequestProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
          getApplicationIdFieldBuilder();
          getApplicationPriorityFieldBuilder();
        }
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        applicationId_ = null;
        if (applicationIdBuilder_ != null) {
          applicationIdBuilder_.dispose();
          applicationIdBuilder_ = null;
        }
        applicationPriority_ = null;
        if (applicationPriorityBuilder_ != null) {
          applicationPriorityBuilder_.dispose();
          applicationPriorityBuilder_ = null;
        }
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_UpdateApplicationPriorityRequestProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityRequestProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityRequestProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityRequestProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityRequestProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityRequestProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityRequestProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityRequestProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityRequestProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.applicationId_ = applicationIdBuilder_ == null
              ? applicationId_
              : applicationIdBuilder_.build();
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.applicationPriority_ = applicationPriorityBuilder_ == null
              ? applicationPriority_
              : applicationPriorityBuilder_.build();
          to_bitField0_ |= 0x00000002;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityRequestProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityRequestProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityRequestProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityRequestProto.getDefaultInstance()) return this;
        if (other.hasApplicationId()) {
          mergeApplicationId(other.getApplicationId());
        }
        if (other.hasApplicationPriority()) {
          mergeApplicationPriority(other.getApplicationPriority());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        if (!hasApplicationId()) {
          return false;
        }
        if (!hasApplicationPriority()) {
          return false;
        }
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
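              // tag = (field_number << 3) | wire_type: applicationId is
              // field 1, length-delimited (wire type 2), so the tag is 10.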
              case 10: {
                input.readMessage(
                    getApplicationIdFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
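              // applicationPriority is field 2, wire type 2:
              // tag = (2 << 3) | 2 = 18.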
              case 18: {
                input.readMessage(
                    getApplicationPriorityFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

      private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto applicationId_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder> applicationIdBuilder_;
      /**
       * <code>required .hadoop.yarn.ApplicationIdProto applicationId = 1;</code>
       * @return Whether the applicationId field is set.
       */
      public boolean hasApplicationId() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>required .hadoop.yarn.ApplicationIdProto applicationId = 1;</code>
       * @return The applicationId.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getApplicationId() {
        if (applicationIdBuilder_ == null) {
          return applicationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_;
        } else {
          return applicationIdBuilder_.getMessage();
        }
      }
      /**
       * <code>required .hadoop.yarn.ApplicationIdProto applicationId = 1;</code>
       */
      public Builder setApplicationId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto value) {
        if (applicationIdBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          applicationId_ = value;
        } else {
          applicationIdBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>required .hadoop.yarn.ApplicationIdProto applicationId = 1;</code>
       */
      public Builder setApplicationId(
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder builderForValue) {
        if (applicationIdBuilder_ == null) {
          applicationId_ = builderForValue.build();
        } else {
          applicationIdBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>required .hadoop.yarn.ApplicationIdProto applicationId = 1;</code>
       */
      public Builder mergeApplicationId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto value) {
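        // Singular message merge: if applicationId is already set to a
        // non-default value, the incoming message is merged into it
        // field-by-field; otherwise it simply replaces the current value.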
        if (applicationIdBuilder_ == null) {
          if (((bitField0_ & 0x00000001) != 0) &&
            applicationId_ != null &&
            applicationId_ != org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance()) {
            getApplicationIdBuilder().mergeFrom(value);
          } else {
            applicationId_ = value;
          }
        } else {
          applicationIdBuilder_.mergeFrom(value);
        }
        if (applicationId_ != null) {
          bitField0_ |= 0x00000001;
          onChanged();
        }
        return this;
      }
      /**
       * <code>required .hadoop.yarn.ApplicationIdProto applicationId = 1;</code>
       */
      public Builder clearApplicationId() {
        bitField0_ = (bitField0_ & ~0x00000001);
        applicationId_ = null;
        if (applicationIdBuilder_ != null) {
          applicationIdBuilder_.dispose();
          applicationIdBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>required .hadoop.yarn.ApplicationIdProto applicationId = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder getApplicationIdBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getApplicationIdFieldBuilder().getBuilder();
      }
      /**
       * <code>required .hadoop.yarn.ApplicationIdProto applicationId = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getApplicationIdOrBuilder() {
        if (applicationIdBuilder_ != null) {
          return applicationIdBuilder_.getMessageOrBuilder();
        } else {
          return applicationId_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_;
        }
      }
      /**
       * <code>required .hadoop.yarn.ApplicationIdProto applicationId = 1;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder> 
          getApplicationIdFieldBuilder() {
        if (applicationIdBuilder_ == null) {
          applicationIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder>(
                  getApplicationId(),
                  getParentForChildren(),
                  isClean());
          applicationId_ = null;
        }
        return applicationIdBuilder_;
      }

      private org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto applicationPriority_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder> applicationPriorityBuilder_;
      /**
       * <code>required .hadoop.yarn.PriorityProto applicationPriority = 2;</code>
       * @return Whether the applicationPriority field is set.
       */
      public boolean hasApplicationPriority() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * <code>required .hadoop.yarn.PriorityProto applicationPriority = 2;</code>
       * @return The applicationPriority.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto getApplicationPriority() {
        if (applicationPriorityBuilder_ == null) {
          return applicationPriority_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance() : applicationPriority_;
        } else {
          return applicationPriorityBuilder_.getMessage();
        }
      }
      /**
       * <code>required .hadoop.yarn.PriorityProto applicationPriority = 2;</code>
       */
      public Builder setApplicationPriority(org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto value) {
        if (applicationPriorityBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          applicationPriority_ = value;
        } else {
          applicationPriorityBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>required .hadoop.yarn.PriorityProto applicationPriority = 2;</code>
       */
      public Builder setApplicationPriority(
          org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder builderForValue) {
        if (applicationPriorityBuilder_ == null) {
          applicationPriority_ = builderForValue.build();
        } else {
          applicationPriorityBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>required .hadoop.yarn.PriorityProto applicationPriority = 2;</code>
       */
      public Builder mergeApplicationPriority(org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto value) {
        if (applicationPriorityBuilder_ == null) {
          if (((bitField0_ & 0x00000002) != 0) &&
            applicationPriority_ != null &&
            applicationPriority_ != org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance()) {
            getApplicationPriorityBuilder().mergeFrom(value);
          } else {
            applicationPriority_ = value;
          }
        } else {
          applicationPriorityBuilder_.mergeFrom(value);
        }
        if (applicationPriority_ != null) {
          bitField0_ |= 0x00000002;
          onChanged();
        }
        return this;
      }
      /**
       * <code>required .hadoop.yarn.PriorityProto applicationPriority = 2;</code>
       */
      public Builder clearApplicationPriority() {
        bitField0_ = (bitField0_ & ~0x00000002);
        applicationPriority_ = null;
        if (applicationPriorityBuilder_ != null) {
          applicationPriorityBuilder_.dispose();
          applicationPriorityBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>required .hadoop.yarn.PriorityProto applicationPriority = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder getApplicationPriorityBuilder() {
        bitField0_ |= 0x00000002;
        onChanged();
        return getApplicationPriorityFieldBuilder().getBuilder();
      }
      /**
       * <code>required .hadoop.yarn.PriorityProto applicationPriority = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder getApplicationPriorityOrBuilder() {
        if (applicationPriorityBuilder_ != null) {
          return applicationPriorityBuilder_.getMessageOrBuilder();
        } else {
          return applicationPriority_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance() : applicationPriority_;
        }
      }
      /**
       * <code>required .hadoop.yarn.PriorityProto applicationPriority = 2;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder> 
          getApplicationPriorityFieldBuilder() {
        if (applicationPriorityBuilder_ == null) {
          applicationPriorityBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder>(
                  getApplicationPriority(),
                  getParentForChildren(),
                  isClean());
          applicationPriority_ = null;
        }
        return applicationPriorityBuilder_;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.UpdateApplicationPriorityRequestProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.UpdateApplicationPriorityRequestProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityRequestProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityRequestProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityRequestProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<UpdateApplicationPriorityRequestProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<UpdateApplicationPriorityRequestProto>() {
      @java.lang.Override
      public UpdateApplicationPriorityRequestProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<UpdateApplicationPriorityRequestProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<UpdateApplicationPriorityRequestProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityRequestProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
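
  /*
   * Illustrative usage sketch (not part of the generated output): both fields
   * of UpdateApplicationPriorityRequestProto are "required", so build() throws
   * an UninitializedMessageException if either is missing. The setters on
   * ApplicationIdProto.Builder and PriorityProto.Builder (setId,
   * setClusterTimestamp, setPriority) are assumed from yarn_protos.proto.
   *
   *   YarnServiceProtos.UpdateApplicationPriorityRequestProto request =
   *       YarnServiceProtos.UpdateApplicationPriorityRequestProto.newBuilder()
   *           .setApplicationId(YarnProtos.ApplicationIdProto.newBuilder()
   *               .setClusterTimestamp(1L).setId(42).build())
   *           .setApplicationPriority(YarnProtos.PriorityProto.newBuilder()
   *               .setPriority(10).build())
   *           .build();
   */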

  public interface UpdateApplicationPriorityResponseProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.UpdateApplicationPriorityResponseProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>optional .hadoop.yarn.PriorityProto applicationPriority = 1;</code>
     * @return Whether the applicationPriority field is set.
     */
    boolean hasApplicationPriority();
    /**
     * <code>optional .hadoop.yarn.PriorityProto applicationPriority = 1;</code>
     * @return The applicationPriority.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto getApplicationPriority();
    /**
     * <code>optional .hadoop.yarn.PriorityProto applicationPriority = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder getApplicationPriorityOrBuilder();
  }
  /**
   * Protobuf type {@code hadoop.yarn.UpdateApplicationPriorityResponseProto}
   */
  public static final class UpdateApplicationPriorityResponseProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.UpdateApplicationPriorityResponseProto)
      UpdateApplicationPriorityResponseProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use UpdateApplicationPriorityResponseProto.newBuilder() to construct.
    private UpdateApplicationPriorityResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private UpdateApplicationPriorityResponseProto() {
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new UpdateApplicationPriorityResponseProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_UpdateApplicationPriorityResponseProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_UpdateApplicationPriorityResponseProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityResponseProto.Builder.class);
    }

    private int bitField0_;
    public static final int APPLICATIONPRIORITY_FIELD_NUMBER = 1;
    private org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto applicationPriority_;
    /**
     * <code>optional .hadoop.yarn.PriorityProto applicationPriority = 1;</code>
     * @return Whether the applicationPriority field is set.
     */
    @java.lang.Override
    public boolean hasApplicationPriority() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.PriorityProto applicationPriority = 1;</code>
     * @return The applicationPriority.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto getApplicationPriority() {
      return applicationPriority_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance() : applicationPriority_;
    }
    /**
     * <code>optional .hadoop.yarn.PriorityProto applicationPriority = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder getApplicationPriorityOrBuilder() {
      return applicationPriority_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance() : applicationPriority_;
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeMessage(1, getApplicationPriority());
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, getApplicationPriority());
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityResponseProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityResponseProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityResponseProto) obj;

      if (hasApplicationPriority() != other.hasApplicationPriority()) return false;
      if (hasApplicationPriority()) {
        if (!getApplicationPriority()
            .equals(other.getApplicationPriority())) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasApplicationPriority()) {
        hash = (37 * hash) + APPLICATIONPRIORITY_FIELD_NUMBER;
        hash = (53 * hash) + getApplicationPriority().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityResponseProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityResponseProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityResponseProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityResponseProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityResponseProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityResponseProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityResponseProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityResponseProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }
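    // All of the parseFrom overloads above delegate to PARSER. An
    // illustrative byte-array round trip (variable names here are sketches,
    // not part of the generated API):
    //
    //   UpdateApplicationPriorityResponseProto original =
    //       UpdateApplicationPriorityResponseProto.newBuilder().build();
    //   byte[] wire = original.toByteArray();
    //   UpdateApplicationPriorityResponseProto parsed =
    //       UpdateApplicationPriorityResponseProto.parseFrom(wire);
    //   assert parsed.equals(original);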

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityResponseProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.UpdateApplicationPriorityResponseProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.UpdateApplicationPriorityResponseProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityResponseProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_UpdateApplicationPriorityResponseProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_UpdateApplicationPriorityResponseProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityResponseProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityResponseProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
          getApplicationPriorityFieldBuilder();
        }
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        applicationPriority_ = null;
        if (applicationPriorityBuilder_ != null) {
          applicationPriorityBuilder_.dispose();
          applicationPriorityBuilder_ = null;
        }
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_UpdateApplicationPriorityResponseProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityResponseProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityResponseProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityResponseProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityResponseProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityResponseProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityResponseProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityResponseProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityResponseProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.applicationPriority_ = applicationPriorityBuilder_ == null
              ? applicationPriority_
              : applicationPriorityBuilder_.build();
          to_bitField0_ |= 0x00000001;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityResponseProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityResponseProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityResponseProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityResponseProto.getDefaultInstance()) return this;
        if (other.hasApplicationPriority()) {
          mergeApplicationPriority(other.getApplicationPriority());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                input.readMessage(
                    getApplicationPriorityFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

      private org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto applicationPriority_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder> applicationPriorityBuilder_;
      /**
       * <code>optional .hadoop.yarn.PriorityProto applicationPriority = 1;</code>
       * @return Whether the applicationPriority field is set.
       */
      public boolean hasApplicationPriority() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.PriorityProto applicationPriority = 1;</code>
       * @return The applicationPriority.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto getApplicationPriority() {
        if (applicationPriorityBuilder_ == null) {
          return applicationPriority_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance() : applicationPriority_;
        } else {
          return applicationPriorityBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.PriorityProto applicationPriority = 1;</code>
       */
      public Builder setApplicationPriority(org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto value) {
        if (applicationPriorityBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          applicationPriority_ = value;
        } else {
          applicationPriorityBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.PriorityProto applicationPriority = 1;</code>
       */
      public Builder setApplicationPriority(
          org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder builderForValue) {
        if (applicationPriorityBuilder_ == null) {
          applicationPriority_ = builderForValue.build();
        } else {
          applicationPriorityBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.PriorityProto applicationPriority = 1;</code>
       */
      public Builder mergeApplicationPriority(org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto value) {
        if (applicationPriorityBuilder_ == null) {
          if (((bitField0_ & 0x00000001) != 0) &&
            applicationPriority_ != null &&
            applicationPriority_ != org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance()) {
            getApplicationPriorityBuilder().mergeFrom(value);
          } else {
            applicationPriority_ = value;
          }
        } else {
          applicationPriorityBuilder_.mergeFrom(value);
        }
        if (applicationPriority_ != null) {
          bitField0_ |= 0x00000001;
          onChanged();
        }
        return this;
      }
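      // Merge semantics for the message field above: if applicationPriority
      // is already set (and is not the default instance), the incoming value
      // is merged field-by-field through the nested builder; otherwise it
      // simply replaces the current value.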
      /**
       * <code>optional .hadoop.yarn.PriorityProto applicationPriority = 1;</code>
       */
      public Builder clearApplicationPriority() {
        bitField0_ = (bitField0_ & ~0x00000001);
        applicationPriority_ = null;
        if (applicationPriorityBuilder_ != null) {
          applicationPriorityBuilder_.dispose();
          applicationPriorityBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.PriorityProto applicationPriority = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder getApplicationPriorityBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getApplicationPriorityFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.PriorityProto applicationPriority = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder getApplicationPriorityOrBuilder() {
        if (applicationPriorityBuilder_ != null) {
          return applicationPriorityBuilder_.getMessageOrBuilder();
        } else {
          return applicationPriority_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance() : applicationPriority_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.PriorityProto applicationPriority = 1;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder>
          getApplicationPriorityFieldBuilder() {
        if (applicationPriorityBuilder_ == null) {
          applicationPriorityBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder>(
                  getApplicationPriority(),
                  getParentForChildren(),
                  isClean());
          applicationPriority_ = null;
        }
        return applicationPriorityBuilder_;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.UpdateApplicationPriorityResponseProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.UpdateApplicationPriorityResponseProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityResponseProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityResponseProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityResponseProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<UpdateApplicationPriorityResponseProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<UpdateApplicationPriorityResponseProto>() {
      @java.lang.Override
      public UpdateApplicationPriorityResponseProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<UpdateApplicationPriorityResponseProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<UpdateApplicationPriorityResponseProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityResponseProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
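  // An illustrative sketch of building the response above. The setPriority
  // setter is assumed from yarn_protos.proto, where PriorityProto carries a
  // single int32 priority field:
  //
  //   UpdateApplicationPriorityResponseProto resp =
  //       UpdateApplicationPriorityResponseProto.newBuilder()
  //           .setApplicationPriority(
  //               org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto
  //                   .newBuilder().setPriority(5).build())
  //           .build();
  //   resp.hasApplicationPriority(); // true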

  public interface SignalContainerRequestProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.SignalContainerRequestProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>required .hadoop.yarn.ContainerIdProto container_id = 1;</code>
     * @return Whether the containerId field is set.
     */
    boolean hasContainerId();
    /**
     * <code>required .hadoop.yarn.ContainerIdProto container_id = 1;</code>
     * @return The containerId.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getContainerId();
    /**
     * <code>required .hadoop.yarn.ContainerIdProto container_id = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getContainerIdOrBuilder();

    /**
     * <code>required .hadoop.yarn.SignalContainerCommandProto command = 2;</code>
     * @return Whether the command field is set.
     */
    boolean hasCommand();
    /**
     * <code>required .hadoop.yarn.SignalContainerCommandProto command = 2;</code>
     * @return The command.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.SignalContainerCommandProto getCommand();
  }
  /**
   * Protobuf type {@code hadoop.yarn.SignalContainerRequestProto}
   */
  public static final class SignalContainerRequestProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.SignalContainerRequestProto)
      SignalContainerRequestProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use SignalContainerRequestProto.newBuilder() to construct.
    private SignalContainerRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private SignalContainerRequestProto() {
      command_ = 1;
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new SignalContainerRequestProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_SignalContainerRequestProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_SignalContainerRequestProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProto.Builder.class);
    }

    private int bitField0_;
    public static final int CONTAINER_ID_FIELD_NUMBER = 1;
    private org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto containerId_;
    /**
     * <code>required .hadoop.yarn.ContainerIdProto container_id = 1;</code>
     * @return Whether the containerId field is set.
     */
    @java.lang.Override
    public boolean hasContainerId() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>required .hadoop.yarn.ContainerIdProto container_id = 1;</code>
     * @return The containerId.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getContainerId() {
      return containerId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : containerId_;
    }
    /**
     * <code>required .hadoop.yarn.ContainerIdProto container_id = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getContainerIdOrBuilder() {
      return containerId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : containerId_;
    }

    public static final int COMMAND_FIELD_NUMBER = 2;
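    // The enum is stored as its raw wire number; the default of 1 presumably
    // corresponds to OUTPUT_THREAD_DUMP, the fallback used by getCommand().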
    private int command_ = 1;
    /**
     * <code>required .hadoop.yarn.SignalContainerCommandProto command = 2;</code>
     * @return Whether the command field is set.
     */
    @java.lang.Override public boolean hasCommand() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <code>required .hadoop.yarn.SignalContainerCommandProto command = 2;</code>
     * @return The command.
     */
    @java.lang.Override public org.apache.hadoop.yarn.proto.YarnProtos.SignalContainerCommandProto getCommand() {
      org.apache.hadoop.yarn.proto.YarnProtos.SignalContainerCommandProto result = org.apache.hadoop.yarn.proto.YarnProtos.SignalContainerCommandProto.forNumber(command_);
      return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.SignalContainerCommandProto.OUTPUT_THREAD_DUMP : result;
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      if (!hasContainerId()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasCommand()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }
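    // Unlike the optional-only messages above, both container_id and command
    // are declared required, so the memoized result can be 0 here and the
    // Builder's build() will throw newUninitializedMessageException until
    // both fields are set.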

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeMessage(1, getContainerId());
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        output.writeEnum(2, command_);
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, getContainerId());
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeEnumSize(2, command_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProto) obj;

      if (hasContainerId() != other.hasContainerId()) return false;
      if (hasContainerId()) {
        if (!getContainerId()
            .equals(other.getContainerId())) return false;
      }
      if (hasCommand() != other.hasCommand()) return false;
      if (hasCommand()) {
        if (command_ != other.command_) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasContainerId()) {
        hash = (37 * hash) + CONTAINER_ID_FIELD_NUMBER;
        hash = (53 * hash) + getContainerId().hashCode();
      }
      if (hasCommand()) {
        hash = (37 * hash) + COMMAND_FIELD_NUMBER;
        hash = (53 * hash) + command_;
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.SignalContainerRequestProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.SignalContainerRequestProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_SignalContainerRequestProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_SignalContainerRequestProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
          getContainerIdFieldBuilder();
        }
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        containerId_ = null;
        if (containerIdBuilder_ != null) {
          containerIdBuilder_.dispose();
          containerIdBuilder_ = null;
        }
        command_ = 1;
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_SignalContainerRequestProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.containerId_ = containerIdBuilder_ == null
              ? containerId_
              : containerIdBuilder_.build();
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.command_ = command_;
          to_bitField0_ |= 0x00000002;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProto.getDefaultInstance()) return this;
        if (other.hasContainerId()) {
          mergeContainerId(other.getContainerId());
        }
        if (other.hasCommand()) {
          setCommand(other.getCommand());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        if (!hasContainerId()) {
          return false;
        }
        if (!hasCommand()) {
          return false;
        }
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                input.readMessage(
                    getContainerIdFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
              case 16: {
                int tmpRaw = input.readEnum();
                org.apache.hadoop.yarn.proto.YarnProtos.SignalContainerCommandProto tmpValue =
                    org.apache.hadoop.yarn.proto.YarnProtos.SignalContainerCommandProto.forNumber(tmpRaw);
                if (tmpValue == null) {
                  mergeUnknownVarintField(2, tmpRaw);
                } else {
                  command_ = tmpRaw;
                  bitField0_ |= 0x00000002;
                }
                break;
              } // case 16
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
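      // Tag decoding above: 10 is (field 1 << 3 | wire type 2), the
      // length-delimited container_id message; 16 is (field 2 << 3 | wire
      // type 0), the varint-encoded command enum. Enum numbers that
      // forNumber() does not recognize are preserved as unknown varint
      // fields rather than dropped, keeping round trips lossless.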
      private int bitField0_;

      private org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto containerId_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> containerIdBuilder_;
      /**
       * <code>required .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       * @return Whether the containerId field is set.
       */
      public boolean hasContainerId() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>required .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       * @return The containerId.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getContainerId() {
        if (containerIdBuilder_ == null) {
          return containerId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : containerId_;
        } else {
          return containerIdBuilder_.getMessage();
        }
      }
      /**
       * <code>required .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public Builder setContainerId(org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) {
        if (containerIdBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          containerId_ = value;
        } else {
          containerIdBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>required .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public Builder setContainerId(
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder builderForValue) {
        if (containerIdBuilder_ == null) {
          containerId_ = builderForValue.build();
        } else {
          containerIdBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>required .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public Builder mergeContainerId(org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) {
        if (containerIdBuilder_ == null) {
          if (((bitField0_ & 0x00000001) != 0) &&
            containerId_ != null &&
            containerId_ != org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance()) {
            getContainerIdBuilder().mergeFrom(value);
          } else {
            containerId_ = value;
          }
        } else {
          containerIdBuilder_.mergeFrom(value);
        }
        if (containerId_ != null) {
          bitField0_ |= 0x00000001;
          onChanged();
        }
        return this;
      }
      /**
       * <code>required .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public Builder clearContainerId() {
        bitField0_ = (bitField0_ & ~0x00000001);
        containerId_ = null;
        if (containerIdBuilder_ != null) {
          containerIdBuilder_.dispose();
          containerIdBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>required .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder getContainerIdBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getContainerIdFieldBuilder().getBuilder();
      }
      /**
       * <code>required .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getContainerIdOrBuilder() {
        if (containerIdBuilder_ != null) {
          return containerIdBuilder_.getMessageOrBuilder();
        } else {
          return containerId_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : containerId_;
        }
      }
      /**
       * <code>required .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder>
          getContainerIdFieldBuilder() {
        if (containerIdBuilder_ == null) {
          containerIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder>(
                  getContainerId(),
                  getParentForChildren(),
                  isClean());
          containerId_ = null;
        }
        return containerIdBuilder_;
      }

      private int command_ = 1;
      /**
       * <code>required .hadoop.yarn.SignalContainerCommandProto command = 2;</code>
       * @return Whether the command field is set.
       */
      @java.lang.Override public boolean hasCommand() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * <code>required .hadoop.yarn.SignalContainerCommandProto command = 2;</code>
       * @return The command.
       */
      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.SignalContainerCommandProto getCommand() {
        org.apache.hadoop.yarn.proto.YarnProtos.SignalContainerCommandProto result = org.apache.hadoop.yarn.proto.YarnProtos.SignalContainerCommandProto.forNumber(command_);
        return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.SignalContainerCommandProto.OUTPUT_THREAD_DUMP : result;
      }
      /**
       * <code>required .hadoop.yarn.SignalContainerCommandProto command = 2;</code>
       * @param value The command to set.
       * @return This builder for chaining.
       */
      public Builder setCommand(org.apache.hadoop.yarn.proto.YarnProtos.SignalContainerCommandProto value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000002;
        command_ = value.getNumber();
        onChanged();
        return this;
      }
      /**
       * <code>required .hadoop.yarn.SignalContainerCommandProto command = 2;</code>
       * @return This builder for chaining.
       */
      public Builder clearCommand() {
        bitField0_ = (bitField0_ & ~0x00000002);
        command_ = 1;
        onChanged();
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.SignalContainerRequestProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.SignalContainerRequestProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<SignalContainerRequestProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<SignalContainerRequestProto>() {
      @java.lang.Override
      public SignalContainerRequestProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<SignalContainerRequestProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<SignalContainerRequestProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
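  // An illustrative sketch of building the request above. ContainerIdProto
  // construction is elided, and GRACEFUL_SHUTDOWN is assumed to be one of the
  // declared SignalContainerCommandProto values:
  //
  //   SignalContainerRequestProto req = SignalContainerRequestProto.newBuilder()
  //       .setContainerId(containerId)
  //       .setCommand(org.apache.hadoop.yarn.proto.YarnProtos
  //           .SignalContainerCommandProto.GRACEFUL_SHUTDOWN)
  //       .build(); // would throw if either required field were left unset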

  public interface SignalContainerResponseProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.SignalContainerResponseProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
  }
  /**
   * Protobuf type {@code hadoop.yarn.SignalContainerResponseProto}
   */
  public static final class SignalContainerResponseProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.SignalContainerResponseProto)
      SignalContainerResponseProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use SignalContainerResponseProto.newBuilder() to construct.
    private SignalContainerResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private SignalContainerResponseProto() {
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new SignalContainerResponseProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_SignalContainerResponseProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_SignalContainerResponseProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerResponseProto.Builder.class);
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerResponseProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerResponseProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerResponseProto) obj;

      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerResponseProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerResponseProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerResponseProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerResponseProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerResponseProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerResponseProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerResponseProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerResponseProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerResponseProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.SignalContainerResponseProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.SignalContainerResponseProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerResponseProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_SignalContainerResponseProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_SignalContainerResponseProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerResponseProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerResponseProto.newBuilder()
      private Builder() {
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_SignalContainerResponseProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerResponseProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerResponseProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerResponseProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerResponseProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerResponseProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerResponseProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerResponseProto(this);
        onBuilt();
        return result;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerResponseProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerResponseProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerResponseProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerResponseProto.getDefaultInstance()) return this;
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.SignalContainerResponseProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.SignalContainerResponseProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerResponseProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerResponseProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerResponseProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<SignalContainerResponseProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<SignalContainerResponseProto>() {
      @java.lang.Override
      public SignalContainerResponseProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
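    // Direct use of the PARSER field is deprecated in generated code; callers
    // should obtain the parser through parser() or getParserForType() below.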

    public static org.apache.hadoop.thirdparty.protobuf.Parser<SignalContainerResponseProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<SignalContainerResponseProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerResponseProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
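  // Usage sketch (not part of the generated output): the response message
  // carries no fields, so a round trip only exercises serialization of the
  // empty unknown-field set.
  //
  //   YarnServiceProtos.SignalContainerResponseProto resp =
  //       YarnServiceProtos.SignalContainerResponseProto.newBuilder().build();
  //   byte[] bytes = resp.toByteArray();
  //   YarnServiceProtos.SignalContainerResponseProto parsed =
  //       YarnServiceProtos.SignalContainerResponseProto.parseFrom(bytes);
  //   assert parsed.equals(resp);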

  public interface UpdateApplicationTimeoutsRequestProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.UpdateApplicationTimeoutsRequestProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>required .hadoop.yarn.ApplicationIdProto applicationId = 1;</code>
     * @return Whether the applicationId field is set.
     */
    boolean hasApplicationId();
    /**
     * <code>required .hadoop.yarn.ApplicationIdProto applicationId = 1;</code>
     * @return The applicationId.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getApplicationId();
    /**
     * <code>required .hadoop.yarn.ApplicationIdProto applicationId = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getApplicationIdOrBuilder();

    /**
     * <code>repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 2;</code>
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto> 
        getApplicationTimeoutsList();
    /**
     * <code>repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 2;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto getApplicationTimeouts(int index);
    /**
     * <code>repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 2;</code>
     */
    int getApplicationTimeoutsCount();
    /**
     * <code>repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 2;</code>
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProtoOrBuilder> 
        getApplicationTimeoutsOrBuilderList();
    /**
     * <code>repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 2;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProtoOrBuilder getApplicationTimeoutsOrBuilder(
        int index);
  }
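  // Both UpdateApplicationTimeoutsRequestProto and its Builder implement the
  // interface above, so read-only consumers can accept either form.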
  /**
   * Protobuf type {@code hadoop.yarn.UpdateApplicationTimeoutsRequestProto}
   */
  public static final class UpdateApplicationTimeoutsRequestProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.UpdateApplicationTimeoutsRequestProto)
      UpdateApplicationTimeoutsRequestProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use UpdateApplicationTimeoutsRequestProto.newBuilder() to construct.
    private UpdateApplicationTimeoutsRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private UpdateApplicationTimeoutsRequestProto() {
      applicationTimeouts_ = java.util.Collections.emptyList();
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new UpdateApplicationTimeoutsRequestProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_UpdateApplicationTimeoutsRequestProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_UpdateApplicationTimeoutsRequestProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsRequestProto.Builder.class);
    }

    private int bitField0_;
    public static final int APPLICATIONID_FIELD_NUMBER = 1;
    private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto applicationId_;
    /**
     * <code>required .hadoop.yarn.ApplicationIdProto applicationId = 1;</code>
     * @return Whether the applicationId field is set.
     */
    @java.lang.Override
    public boolean hasApplicationId() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>required .hadoop.yarn.ApplicationIdProto applicationId = 1;</code>
     * @return The applicationId.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getApplicationId() {
      return applicationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_;
    }
    /**
     * <code>required .hadoop.yarn.ApplicationIdProto applicationId = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getApplicationIdOrBuilder() {
      return applicationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_;
    }

    public static final int APPLICATION_TIMEOUTS_FIELD_NUMBER = 2;
    @SuppressWarnings("serial")
    private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto> applicationTimeouts_;
    /**
     * <code>repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 2;</code>
     */
    @java.lang.Override
    public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto> getApplicationTimeoutsList() {
      return applicationTimeouts_;
    }
    /**
     * <code>repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 2;</code>
     */
    @java.lang.Override
    public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProtoOrBuilder> 
        getApplicationTimeoutsOrBuilderList() {
      return applicationTimeouts_;
    }
    /**
     * <code>repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 2;</code>
     */
    @java.lang.Override
    public int getApplicationTimeoutsCount() {
      return applicationTimeouts_.size();
    }
    /**
     * <code>repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 2;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto getApplicationTimeouts(int index) {
      return applicationTimeouts_.get(index);
    }
    /**
     * <code>repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 2;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProtoOrBuilder getApplicationTimeoutsOrBuilder(
        int index) {
      return applicationTimeouts_.get(index);
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      if (!hasApplicationId()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }
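    // applicationId is a required field, so isInitialized() memoizes 0 and
    // fails until it is set. No per-element check is generated for
    // application_timeouts, which indicates its element type declares no
    // required fields of its own.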

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeMessage(1, getApplicationId());
      }
      for (int i = 0; i < applicationTimeouts_.size(); i++) {
        output.writeMessage(2, applicationTimeouts_.get(i));
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, getApplicationId());
      }
      for (int i = 0; i < applicationTimeouts_.size(); i++) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(2, applicationTimeouts_.get(i));
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsRequestProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsRequestProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsRequestProto) obj;

      if (hasApplicationId() != other.hasApplicationId()) return false;
      if (hasApplicationId()) {
        if (!getApplicationId()
            .equals(other.getApplicationId())) return false;
      }
      if (!getApplicationTimeoutsList()
          .equals(other.getApplicationTimeoutsList())) return false;
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasApplicationId()) {
        hash = (37 * hash) + APPLICATIONID_FIELD_NUMBER;
        hash = (53 * hash) + getApplicationId().hashCode();
      }
      if (getApplicationTimeoutsCount() > 0) {
        hash = (37 * hash) + APPLICATION_TIMEOUTS_FIELD_NUMBER;
        hash = (53 * hash) + getApplicationTimeoutsList().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsRequestProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsRequestProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsRequestProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsRequestProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsRequestProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsRequestProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsRequestProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsRequestProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsRequestProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.UpdateApplicationTimeoutsRequestProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.UpdateApplicationTimeoutsRequestProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsRequestProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_UpdateApplicationTimeoutsRequestProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_UpdateApplicationTimeoutsRequestProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsRequestProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsRequestProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
          getApplicationIdFieldBuilder();
          getApplicationTimeoutsFieldBuilder();
        }
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        applicationId_ = null;
        if (applicationIdBuilder_ != null) {
          applicationIdBuilder_.dispose();
          applicationIdBuilder_ = null;
        }
        if (applicationTimeoutsBuilder_ == null) {
          applicationTimeouts_ = java.util.Collections.emptyList();
        } else {
          applicationTimeouts_ = null;
          applicationTimeoutsBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000002);
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_UpdateApplicationTimeoutsRequestProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsRequestProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsRequestProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsRequestProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsRequestProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsRequestProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsRequestProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsRequestProto(this);
        buildPartialRepeatedFields(result);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartialRepeatedFields(org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsRequestProto result) {
        if (applicationTimeoutsBuilder_ == null) {
          if (((bitField0_ & 0x00000002) != 0)) {
            applicationTimeouts_ = java.util.Collections.unmodifiableList(applicationTimeouts_);
            bitField0_ = (bitField0_ & ~0x00000002);
          }
          result.applicationTimeouts_ = applicationTimeouts_;
        } else {
          result.applicationTimeouts_ = applicationTimeoutsBuilder_.build();
        }
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsRequestProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.applicationId_ = applicationIdBuilder_ == null
              ? applicationId_
              : applicationIdBuilder_.build();
          to_bitField0_ |= 0x00000001;
        }
        result.bitField0_ |= to_bitField0_;
      }
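      // buildPartial0 copies the single applicationId field into the result,
      // preferring the nested builder's build() when one exists, and then
      // transfers the corresponding has-bit into the message's bitField0_.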

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsRequestProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsRequestProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsRequestProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsRequestProto.getDefaultInstance()) return this;
        if (other.hasApplicationId()) {
          mergeApplicationId(other.getApplicationId());
        }
        if (applicationTimeoutsBuilder_ == null) {
          if (!other.applicationTimeouts_.isEmpty()) {
            if (applicationTimeouts_.isEmpty()) {
              applicationTimeouts_ = other.applicationTimeouts_;
              bitField0_ = (bitField0_ & ~0x00000002);
            } else {
              ensureApplicationTimeoutsIsMutable();
              applicationTimeouts_.addAll(other.applicationTimeouts_);
            }
            onChanged();
          }
        } else {
          if (!other.applicationTimeouts_.isEmpty()) {
            if (applicationTimeoutsBuilder_.isEmpty()) {
              applicationTimeoutsBuilder_.dispose();
              applicationTimeoutsBuilder_ = null;
              applicationTimeouts_ = other.applicationTimeouts_;
              bitField0_ = (bitField0_ & ~0x00000002);
              applicationTimeoutsBuilder_ = 
                org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                   getApplicationTimeoutsFieldBuilder() : null;
            } else {
              applicationTimeoutsBuilder_.addAllMessages(other.applicationTimeouts_);
            }
          }
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }
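      // Merge note: when this builder holds no repeated-field builder and its
      // own list is empty, it adopts other's immutable list directly and
      // leaves bit 0x00000002 clear; a later mutation goes through
      // ensureApplicationTimeoutsIsMutable(), which copies before writing.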

      @java.lang.Override
      public final boolean isInitialized() {
        if (!hasApplicationId()) {
          return false;
        }
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                input.readMessage(
                    getApplicationIdFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
              case 18: {
                org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto m =
                    input.readMessage(
                        org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto.PARSER,
                        extensionRegistry);
                if (applicationTimeoutsBuilder_ == null) {
                  ensureApplicationTimeoutsIsMutable();
                  applicationTimeouts_.add(m);
                } else {
                  applicationTimeoutsBuilder_.addMessage(m);
                }
                break;
              } // case 18
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
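      // Wire-format note: tag = (field_number << 3) | wire_type, and both
      // fields are length-delimited (wire type 2), so applicationId (field 1)
      // arrives as tag 10 and application_timeouts (field 2) as tag 18,
      // matching the two cases in the switch above.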
      private int bitField0_;

      private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto applicationId_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder> applicationIdBuilder_;
      /**
       * <code>required .hadoop.yarn.ApplicationIdProto applicationId = 1;</code>
       * @return Whether the applicationId field is set.
       */
      public boolean hasApplicationId() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>required .hadoop.yarn.ApplicationIdProto applicationId = 1;</code>
       * @return The applicationId.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getApplicationId() {
        if (applicationIdBuilder_ == null) {
          return applicationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_;
        } else {
          return applicationIdBuilder_.getMessage();
        }
      }
      /**
       * <code>required .hadoop.yarn.ApplicationIdProto applicationId = 1;</code>
       */
      public Builder setApplicationId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto value) {
        if (applicationIdBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          applicationId_ = value;
        } else {
          applicationIdBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>required .hadoop.yarn.ApplicationIdProto applicationId = 1;</code>
       */
      public Builder setApplicationId(
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder builderForValue) {
        if (applicationIdBuilder_ == null) {
          applicationId_ = builderForValue.build();
        } else {
          applicationIdBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>required .hadoop.yarn.ApplicationIdProto applicationId = 1;</code>
       */
      public Builder mergeApplicationId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto value) {
        if (applicationIdBuilder_ == null) {
          if (((bitField0_ & 0x00000001) != 0) &&
            applicationId_ != null &&
            applicationId_ != org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance()) {
            getApplicationIdBuilder().mergeFrom(value);
          } else {
            applicationId_ = value;
          }
        } else {
          applicationIdBuilder_.mergeFrom(value);
        }
        if (applicationId_ != null) {
          bitField0_ |= 0x00000001;
          onChanged();
        }
        return this;
      }
      /**
       * <code>required .hadoop.yarn.ApplicationIdProto applicationId = 1;</code>
       */
      public Builder clearApplicationId() {
        bitField0_ = (bitField0_ & ~0x00000001);
        applicationId_ = null;
        if (applicationIdBuilder_ != null) {
          applicationIdBuilder_.dispose();
          applicationIdBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>required .hadoop.yarn.ApplicationIdProto applicationId = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder getApplicationIdBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getApplicationIdFieldBuilder().getBuilder();
      }
      /**
       * <code>required .hadoop.yarn.ApplicationIdProto applicationId = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getApplicationIdOrBuilder() {
        if (applicationIdBuilder_ != null) {
          return applicationIdBuilder_.getMessageOrBuilder();
        } else {
          return applicationId_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_;
        }
      }
      /**
       * <code>required .hadoop.yarn.ApplicationIdProto applicationId = 1;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder> 
          getApplicationIdFieldBuilder() {
        if (applicationIdBuilder_ == null) {
          applicationIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder>(
                  getApplicationId(),
                  getParentForChildren(),
                  isClean());
          applicationId_ = null;
        }
        return applicationIdBuilder_;
      }
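      // The SingleFieldBuilderV3 is created lazily on first access. It is
      // seeded with the current applicationId_ value, which is then nulled so
      // that the field builder becomes the single source of truth.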

      private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto> applicationTimeouts_ =
        java.util.Collections.emptyList();
      private void ensureApplicationTimeoutsIsMutable() {
        if (!((bitField0_ & 0x00000002) != 0)) {
          applicationTimeouts_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto>(applicationTimeouts_);
          bitField0_ |= 0x00000002;
        }
      }

      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProtoOrBuilder> applicationTimeoutsBuilder_;

      /**
       * <code>repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 2;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto> getApplicationTimeoutsList() {
        if (applicationTimeoutsBuilder_ == null) {
          return java.util.Collections.unmodifiableList(applicationTimeouts_);
        } else {
          return applicationTimeoutsBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 2;</code>
       */
      public int getApplicationTimeoutsCount() {
        if (applicationTimeoutsBuilder_ == null) {
          return applicationTimeouts_.size();
        } else {
          return applicationTimeoutsBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto getApplicationTimeouts(int index) {
        if (applicationTimeoutsBuilder_ == null) {
          return applicationTimeouts_.get(index);
        } else {
          return applicationTimeoutsBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 2;</code>
       */
      public Builder setApplicationTimeouts(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto value) {
        if (applicationTimeoutsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureApplicationTimeoutsIsMutable();
          applicationTimeouts_.set(index, value);
          onChanged();
        } else {
          applicationTimeoutsBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 2;</code>
       */
      public Builder setApplicationTimeouts(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto.Builder builderForValue) {
        if (applicationTimeoutsBuilder_ == null) {
          ensureApplicationTimeoutsIsMutable();
          applicationTimeouts_.set(index, builderForValue.build());
          onChanged();
        } else {
          applicationTimeoutsBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 2;</code>
       */
      public Builder addApplicationTimeouts(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto value) {
        if (applicationTimeoutsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureApplicationTimeoutsIsMutable();
          applicationTimeouts_.add(value);
          onChanged();
        } else {
          applicationTimeoutsBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 2;</code>
       */
      public Builder addApplicationTimeouts(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto value) {
        if (applicationTimeoutsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureApplicationTimeoutsIsMutable();
          applicationTimeouts_.add(index, value);
          onChanged();
        } else {
          applicationTimeoutsBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 2;</code>
       */
      public Builder addApplicationTimeouts(
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto.Builder builderForValue) {
        if (applicationTimeoutsBuilder_ == null) {
          ensureApplicationTimeoutsIsMutable();
          applicationTimeouts_.add(builderForValue.build());
          onChanged();
        } else {
          applicationTimeoutsBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 2;</code>
       */
      public Builder addApplicationTimeouts(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto.Builder builderForValue) {
        if (applicationTimeoutsBuilder_ == null) {
          ensureApplicationTimeoutsIsMutable();
          applicationTimeouts_.add(index, builderForValue.build());
          onChanged();
        } else {
          applicationTimeoutsBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 2;</code>
       */
      public Builder addAllApplicationTimeouts(
          java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto> values) {
        if (applicationTimeoutsBuilder_ == null) {
          ensureApplicationTimeoutsIsMutable();
          org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
              values, applicationTimeouts_);
          onChanged();
        } else {
          applicationTimeoutsBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 2;</code>
       */
      public Builder clearApplicationTimeouts() {
        if (applicationTimeoutsBuilder_ == null) {
          applicationTimeouts_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000002);
          onChanged();
        } else {
          applicationTimeoutsBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 2;</code>
       */
      public Builder removeApplicationTimeouts(int index) {
        if (applicationTimeoutsBuilder_ == null) {
          ensureApplicationTimeoutsIsMutable();
          applicationTimeouts_.remove(index);
          onChanged();
        } else {
          applicationTimeoutsBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto.Builder getApplicationTimeoutsBuilder(
          int index) {
        return getApplicationTimeoutsFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProtoOrBuilder getApplicationTimeoutsOrBuilder(
          int index) {
        if (applicationTimeoutsBuilder_ == null) {
          return applicationTimeouts_.get(index);
        } else {
          return applicationTimeoutsBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 2;</code>
       */
      public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProtoOrBuilder> 
           getApplicationTimeoutsOrBuilderList() {
        if (applicationTimeoutsBuilder_ != null) {
          return applicationTimeoutsBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(applicationTimeouts_);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto.Builder addApplicationTimeoutsBuilder() {
        return getApplicationTimeoutsFieldBuilder().addBuilder(
            org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto.Builder addApplicationTimeoutsBuilder(
          int index) {
        return getApplicationTimeoutsFieldBuilder().addBuilder(
            index, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 2;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto.Builder> 
           getApplicationTimeoutsBuilderList() {
        return getApplicationTimeoutsFieldBuilder().getBuilderList();
      }
      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProtoOrBuilder> 
          getApplicationTimeoutsFieldBuilder() {
        if (applicationTimeoutsBuilder_ == null) {
          applicationTimeoutsBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProtoOrBuilder>(
                  applicationTimeouts_,
                  ((bitField0_ & 0x00000002) != 0),
                  getParentForChildren(),
                  isClean());
          applicationTimeouts_ = null;
        }
        return applicationTimeoutsBuilder_;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.UpdateApplicationTimeoutsRequestProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.UpdateApplicationTimeoutsRequestProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsRequestProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsRequestProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsRequestProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<UpdateApplicationTimeoutsRequestProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<UpdateApplicationTimeoutsRequestProto>() {
      @java.lang.Override
      public UpdateApplicationTimeoutsRequestProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<UpdateApplicationTimeoutsRequestProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<UpdateApplicationTimeoutsRequestProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsRequestProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
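  // Usage sketch (not part of the generated output): building a request. The
  // setters on ApplicationIdProto assume its id and cluster_timestamp fields
  // from yarn_protos.proto; the values below are placeholders.
  //
  //   YarnProtos.ApplicationIdProto appId =
  //       YarnProtos.ApplicationIdProto.newBuilder()
  //           .setClusterTimestamp(System.currentTimeMillis())
  //           .setId(1)
  //           .build();
  //   YarnServiceProtos.UpdateApplicationTimeoutsRequestProto request =
  //       YarnServiceProtos.UpdateApplicationTimeoutsRequestProto.newBuilder()
  //           .setApplicationId(appId)
  //           .build();
  //
  // Because applicationId is required, build() throws an
  // UninitializedMessageException when it is omitted; buildPartial() skips
  // that check. Timeout entries are added via addApplicationTimeouts().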

  public interface UpdateApplicationTimeoutsResponseProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.UpdateApplicationTimeoutsResponseProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 1;</code>
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto> 
        getApplicationTimeoutsList();
    /**
     * <code>repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto getApplicationTimeouts(int index);
    /**
     * <code>repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 1;</code>
     */
    int getApplicationTimeoutsCount();
    /**
     * <code>repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 1;</code>
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProtoOrBuilder> 
        getApplicationTimeoutsOrBuilderList();
    /**
     * <code>repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProtoOrBuilder getApplicationTimeoutsOrBuilder(
        int index);
  }
  /**
   * Protobuf type {@code hadoop.yarn.UpdateApplicationTimeoutsResponseProto}
   */
  public static final class UpdateApplicationTimeoutsResponseProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.UpdateApplicationTimeoutsResponseProto)
      UpdateApplicationTimeoutsResponseProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use UpdateApplicationTimeoutsResponseProto.newBuilder() to construct.
    private UpdateApplicationTimeoutsResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private UpdateApplicationTimeoutsResponseProto() {
      applicationTimeouts_ = java.util.Collections.emptyList();
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new UpdateApplicationTimeoutsResponseProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_UpdateApplicationTimeoutsResponseProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_UpdateApplicationTimeoutsResponseProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsResponseProto.Builder.class);
    }

    public static final int APPLICATION_TIMEOUTS_FIELD_NUMBER = 1;
    @SuppressWarnings("serial")
    private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto> applicationTimeouts_;
    /**
     * <code>repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 1;</code>
     */
    @java.lang.Override
    public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto> getApplicationTimeoutsList() {
      return applicationTimeouts_;
    }
    /**
     * <code>repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 1;</code>
     */
    @java.lang.Override
    public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProtoOrBuilder> 
        getApplicationTimeoutsOrBuilderList() {
      return applicationTimeouts_;
    }
    /**
     * <code>repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 1;</code>
     */
    @java.lang.Override
    public int getApplicationTimeoutsCount() {
      return applicationTimeouts_.size();
    }
    /**
     * <code>repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto getApplicationTimeouts(int index) {
      return applicationTimeouts_.get(index);
    }
    /**
     * <code>repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProtoOrBuilder getApplicationTimeoutsOrBuilder(
        int index) {
      return applicationTimeouts_.get(index);
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }
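    // memoizedIsInitialized: -1 = not yet computed, 0 = known invalid,
    // 1 = known valid. This message has no required fields, so the check
    // trivially succeeds and caches 1.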

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      for (int i = 0; i < applicationTimeouts_.size(); i++) {
        output.writeMessage(1, applicationTimeouts_.get(i));
      }
      getUnknownFields().writeTo(output);
    }
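    // Each application_timeouts element is emitted as a length-delimited
    // record tagged with field number 1; unknown fields captured at parse
    // time are re-serialized verbatim, preserving round-trip fidelity.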

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      for (int i = 0; i < applicationTimeouts_.size(); i++) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, applicationTimeouts_.get(i));
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }
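    // The serialized size is computed once and cached in memoizedSize
    // (-1 means "not computed"); computeMessageSize covers the tag, the
    // length varint, and the nested payload for each element.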

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsResponseProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsResponseProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsResponseProto) obj;

      if (!getApplicationTimeoutsList()
          .equals(other.getApplicationTimeoutsList())) return false;
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (getApplicationTimeoutsCount() > 0) {
        hash = (37 * hash) + APPLICATION_TIMEOUTS_FIELD_NUMBER;
        hash = (53 * hash) + getApplicationTimeoutsList().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
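    // hashCode is memoized (0 means "not computed"). The descriptor hash
    // separates distinct message types; each populated field then mixes in
    // its field number and value hash with the usual 37/53 multipliers.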

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsResponseProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsResponseProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsResponseProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsResponseProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsResponseProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsResponseProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsResponseProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsResponseProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }
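    // Overload summary: the byte-oriented parseFrom variants (ByteBuffer,
    // ByteString, byte[]) throw InvalidProtocolBufferException directly; the
    // stream variants funnel IO errors through parseWithIOException; the
    // parseDelimitedFrom variants expect a varint length prefix, which lets
    // several messages sit back-to-back on one stream.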

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsResponseProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }
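    // toBuilder() skips the mergeFrom copy when called on DEFAULT_INSTANCE,
    // since an empty prototype has nothing worth copying.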

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.UpdateApplicationTimeoutsResponseProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.UpdateApplicationTimeoutsResponseProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsResponseProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_UpdateApplicationTimeoutsResponseProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_UpdateApplicationTimeoutsResponseProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsResponseProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsResponseProto.newBuilder()
      private Builder() {

      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);

      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        if (applicationTimeoutsBuilder_ == null) {
          applicationTimeouts_ = java.util.Collections.emptyList();
        } else {
          applicationTimeouts_ = null;
          applicationTimeoutsBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_UpdateApplicationTimeoutsResponseProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsResponseProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsResponseProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsResponseProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsResponseProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsResponseProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsResponseProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsResponseProto(this);
        buildPartialRepeatedFields(result);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartialRepeatedFields(org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsResponseProto result) {
        if (applicationTimeoutsBuilder_ == null) {
          if (((bitField0_ & 0x00000001) != 0)) {
            applicationTimeouts_ = java.util.Collections.unmodifiableList(applicationTimeouts_);
            bitField0_ = (bitField0_ & ~0x00000001);
          }
          result.applicationTimeouts_ = applicationTimeouts_;
        } else {
          result.applicationTimeouts_ = applicationTimeoutsBuilder_.build();
        }
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsResponseProto result) {
        int from_bitField0_ = bitField0_;
      }
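      // This message has no singular fields, so buildPartial0 has nothing to
      // transfer and the local above goes unused; the repeated field is
      // handed off in buildPartialRepeatedFields() instead.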

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsResponseProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsResponseProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsResponseProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsResponseProto.getDefaultInstance()) return this;
        if (applicationTimeoutsBuilder_ == null) {
          if (!other.applicationTimeouts_.isEmpty()) {
            if (applicationTimeouts_.isEmpty()) {
              applicationTimeouts_ = other.applicationTimeouts_;
              bitField0_ = (bitField0_ & ~0x00000001);
            } else {
              ensureApplicationTimeoutsIsMutable();
              applicationTimeouts_.addAll(other.applicationTimeouts_);
            }
            onChanged();
          }
        } else {
          if (!other.applicationTimeouts_.isEmpty()) {
            if (applicationTimeoutsBuilder_.isEmpty()) {
              applicationTimeoutsBuilder_.dispose();
              applicationTimeoutsBuilder_ = null;
              applicationTimeouts_ = other.applicationTimeouts_;
              bitField0_ = (bitField0_ & ~0x00000001);
              applicationTimeoutsBuilder_ = 
                org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                   getApplicationTimeoutsFieldBuilder() : null;
            } else {
              applicationTimeoutsBuilder_.addAllMessages(other.applicationTimeouts_);
            }
          }
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }
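      // Merge strategy for the repeated field: without a field builder, an
      // empty local list simply aliases the other message's immutable list
      // (copy-on-write makes this safe), otherwise elements are appended.
      // With a live field builder, the builder is either rebased onto the
      // other list or bulk-loaded via addAllMessages().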

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto m =
                    input.readMessage(
                        org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto.PARSER,
                        extensionRegistry);
                if (applicationTimeoutsBuilder_ == null) {
                  ensureApplicationTimeoutsIsMutable();
                  applicationTimeouts_.add(m);
                } else {
                  applicationTimeoutsBuilder_.addMessage(m);
                }
                break;
              } // case 10
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
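      // Tag 10 decodes as field 1, wire type 2 (length-delimited), i.e.
      // (1 << 3) | 2; tag 0 marks end of input. Anything unrecognized is
      // retained through parseUnknownField() rather than dropped.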
      private int bitField0_;

      private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto> applicationTimeouts_ =
        java.util.Collections.emptyList();
      private void ensureApplicationTimeoutsIsMutable() {
        if (!((bitField0_ & 0x00000001) != 0)) {
          applicationTimeouts_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto>(applicationTimeouts_);
          bitField0_ |= 0x00000001;
        }
      }
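      // Bit 0x00000001 tracks whether the builder owns a private mutable
      // copy of the list; until it is set, applicationTimeouts_ may alias an
      // immutable list, so the first mutation makes a defensive copy.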

      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProtoOrBuilder> applicationTimeoutsBuilder_;

      /**
       * <code>repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 1;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto> getApplicationTimeoutsList() {
        if (applicationTimeoutsBuilder_ == null) {
          return java.util.Collections.unmodifiableList(applicationTimeouts_);
        } else {
          return applicationTimeoutsBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 1;</code>
       */
      public int getApplicationTimeoutsCount() {
        if (applicationTimeoutsBuilder_ == null) {
          return applicationTimeouts_.size();
        } else {
          return applicationTimeoutsBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto getApplicationTimeouts(int index) {
        if (applicationTimeoutsBuilder_ == null) {
          return applicationTimeouts_.get(index);
        } else {
          return applicationTimeoutsBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 1;</code>
       */
      public Builder setApplicationTimeouts(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto value) {
        if (applicationTimeoutsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureApplicationTimeoutsIsMutable();
          applicationTimeouts_.set(index, value);
          onChanged();
        } else {
          applicationTimeoutsBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 1;</code>
       */
      public Builder setApplicationTimeouts(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto.Builder builderForValue) {
        if (applicationTimeoutsBuilder_ == null) {
          ensureApplicationTimeoutsIsMutable();
          applicationTimeouts_.set(index, builderForValue.build());
          onChanged();
        } else {
          applicationTimeoutsBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 1;</code>
       */
      public Builder addApplicationTimeouts(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto value) {
        if (applicationTimeoutsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureApplicationTimeoutsIsMutable();
          applicationTimeouts_.add(value);
          onChanged();
        } else {
          applicationTimeoutsBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 1;</code>
       */
      public Builder addApplicationTimeouts(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto value) {
        if (applicationTimeoutsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureApplicationTimeoutsIsMutable();
          applicationTimeouts_.add(index, value);
          onChanged();
        } else {
          applicationTimeoutsBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 1;</code>
       */
      public Builder addApplicationTimeouts(
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto.Builder builderForValue) {
        if (applicationTimeoutsBuilder_ == null) {
          ensureApplicationTimeoutsIsMutable();
          applicationTimeouts_.add(builderForValue.build());
          onChanged();
        } else {
          applicationTimeoutsBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 1;</code>
       */
      public Builder addApplicationTimeouts(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto.Builder builderForValue) {
        if (applicationTimeoutsBuilder_ == null) {
          ensureApplicationTimeoutsIsMutable();
          applicationTimeouts_.add(index, builderForValue.build());
          onChanged();
        } else {
          applicationTimeoutsBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 1;</code>
       */
      public Builder addAllApplicationTimeouts(
          java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto> values) {
        if (applicationTimeoutsBuilder_ == null) {
          ensureApplicationTimeoutsIsMutable();
          org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
              values, applicationTimeouts_);
          onChanged();
        } else {
          applicationTimeoutsBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 1;</code>
       */
      public Builder clearApplicationTimeouts() {
        if (applicationTimeoutsBuilder_ == null) {
          applicationTimeouts_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000001);
          onChanged();
        } else {
          applicationTimeoutsBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 1;</code>
       */
      public Builder removeApplicationTimeouts(int index) {
        if (applicationTimeoutsBuilder_ == null) {
          ensureApplicationTimeoutsIsMutable();
          applicationTimeouts_.remove(index);
          onChanged();
        } else {
          applicationTimeoutsBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto.Builder getApplicationTimeoutsBuilder(
          int index) {
        return getApplicationTimeoutsFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProtoOrBuilder getApplicationTimeoutsOrBuilder(
          int index) {
        if (applicationTimeoutsBuilder_ == null) {
          return applicationTimeouts_.get(index);
        } else {
          return applicationTimeoutsBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 1;</code>
       */
      public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProtoOrBuilder> 
           getApplicationTimeoutsOrBuilderList() {
        if (applicationTimeoutsBuilder_ != null) {
          return applicationTimeoutsBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(applicationTimeouts_);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto.Builder addApplicationTimeoutsBuilder() {
        return getApplicationTimeoutsFieldBuilder().addBuilder(
            org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto.Builder addApplicationTimeoutsBuilder(
          int index) {
        return getApplicationTimeoutsFieldBuilder().addBuilder(
            index, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 1;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto.Builder> 
           getApplicationTimeoutsBuilderList() {
        return getApplicationTimeoutsFieldBuilder().getBuilderList();
      }
      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProtoOrBuilder> 
          getApplicationTimeoutsFieldBuilder() {
        if (applicationTimeoutsBuilder_ == null) {
          applicationTimeoutsBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProtoOrBuilder>(
                  applicationTimeouts_,
                  ((bitField0_ & 0x00000001) != 0),
                  getParentForChildren(),
                  isClean());
          applicationTimeouts_ = null;
        }
        return applicationTimeoutsBuilder_;
      }
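      // The RepeatedFieldBuilderV3 is created lazily on the first
      // builder-view access; it takes ownership of the current list (note
      // applicationTimeouts_ is nulled), and all subsequent reads and writes
      // are routed through it.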
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.UpdateApplicationTimeoutsResponseProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.UpdateApplicationTimeoutsResponseProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsResponseProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsResponseProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsResponseProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<UpdateApplicationTimeoutsResponseProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<UpdateApplicationTimeoutsResponseProto>() {
      @java.lang.Override
      public UpdateApplicationTimeoutsResponseProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<UpdateApplicationTimeoutsResponseProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<UpdateApplicationTimeoutsResponseProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsResponseProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
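  // Illustrative round trip (editorial sketch, not generated code; the
  // nested getDefaultInstance() stands in for a real timeout entry):
  //
  //   UpdateApplicationTimeoutsResponseProto response =
  //       UpdateApplicationTimeoutsResponseProto.newBuilder()
  //           .addApplicationTimeouts(
  //               org.apache.hadoop.yarn.proto.YarnProtos
  //                   .ApplicationUpdateTimeoutMapProto.getDefaultInstance())
  //           .build();
  //   byte[] wire = response.toByteArray();
  //   UpdateApplicationTimeoutsResponseProto parsed =
  //       UpdateApplicationTimeoutsResponseProto.parseFrom(wire);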

  public interface GetAllResourceProfilesRequestProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.GetAllResourceProfilesRequestProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
  }
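  // GetAllResourceProfilesRequestProto carries no fields of its own; it is a
  // typed RPC envelope, which is why the OrBuilder interface above is empty.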
  /**
   * Protobuf type {@code hadoop.yarn.GetAllResourceProfilesRequestProto}
   */
  public static final class GetAllResourceProfilesRequestProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.GetAllResourceProfilesRequestProto)
      GetAllResourceProfilesRequestProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use GetAllResourceProfilesRequestProto.newBuilder() to construct.
    private GetAllResourceProfilesRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private GetAllResourceProfilesRequestProto() {
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new GetAllResourceProfilesRequestProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetAllResourceProfilesRequestProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetAllResourceProfilesRequestProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesRequestProto.Builder.class);
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesRequestProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesRequestProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesRequestProto) obj;

      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesRequestProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesRequestProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesRequestProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesRequestProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesRequestProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesRequestProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesRequestProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesRequestProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesRequestProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.GetAllResourceProfilesRequestProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.GetAllResourceProfilesRequestProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesRequestProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetAllResourceProfilesRequestProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetAllResourceProfilesRequestProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesRequestProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesRequestProto.newBuilder()
      private Builder() {

      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);

      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetAllResourceProfilesRequestProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesRequestProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesRequestProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesRequestProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesRequestProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesRequestProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesRequestProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesRequestProto(this);
        onBuilt();
        return result;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesRequestProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesRequestProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesRequestProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesRequestProto.getDefaultInstance()) return this;
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.GetAllResourceProfilesRequestProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.GetAllResourceProfilesRequestProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesRequestProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesRequestProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesRequestProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<GetAllResourceProfilesRequestProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<GetAllResourceProfilesRequestProto>() {
      @java.lang.Override
      public GetAllResourceProfilesRequestProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<GetAllResourceProfilesRequestProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<GetAllResourceProfilesRequestProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesRequestProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }

  public interface GetAllResourceProfilesResponseProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.GetAllResourceProfilesResponseProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>required .hadoop.yarn.ResourceProfilesProto resource_profiles = 1;</code>
     * @return Whether the resourceProfiles field is set.
     */
    boolean hasResourceProfiles();
    /**
     * <code>required .hadoop.yarn.ResourceProfilesProto resource_profiles = 1;</code>
     * @return The resourceProfiles.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto getResourceProfiles();
    /**
     * <code>required .hadoop.yarn.ResourceProfilesProto resource_profiles = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProtoOrBuilder getResourceProfilesOrBuilder();
  }
  /**
   * Protobuf type {@code hadoop.yarn.GetAllResourceProfilesResponseProto}
   */
  public static final class GetAllResourceProfilesResponseProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.GetAllResourceProfilesResponseProto)
      GetAllResourceProfilesResponseProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use GetAllResourceProfilesResponseProto.newBuilder() to construct.
    private GetAllResourceProfilesResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private GetAllResourceProfilesResponseProto() {
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new GetAllResourceProfilesResponseProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetAllResourceProfilesResponseProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetAllResourceProfilesResponseProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesResponseProto.Builder.class);
    }

    private int bitField0_;
    public static final int RESOURCE_PROFILES_FIELD_NUMBER = 1;
    private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto resourceProfiles_;
    /**
     * <code>required .hadoop.yarn.ResourceProfilesProto resource_profiles = 1;</code>
     * @return Whether the resourceProfiles field is set.
     */
    @java.lang.Override
    public boolean hasResourceProfiles() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>required .hadoop.yarn.ResourceProfilesProto resource_profiles = 1;</code>
     * @return The resourceProfiles.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto getResourceProfiles() {
      return resourceProfiles_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto.getDefaultInstance() : resourceProfiles_;
    }
    /**
     * <code>required .hadoop.yarn.ResourceProfilesProto resource_profiles = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProtoOrBuilder getResourceProfilesOrBuilder() {
      return resourceProfiles_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto.getDefaultInstance() : resourceProfiles_;
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      if (!hasResourceProfiles()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!getResourceProfiles().isInitialized()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }
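
    /*
     * Editorial note (not generated by protoc): memoizedIsInitialized is a
     * tri-state cache: -1 means "not yet computed", 0 means "known
     * uninitialized", 1 means "known initialized". Because resource_profiles
     * is a required field, isInitialized() checks both the presence bit and
     * the nested message's own required fields, and caches the verdict so
     * repeated calls (e.g. from Builder.build()) stay cheap.
     */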

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeMessage(1, getResourceProfiles());
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, getResourceProfiles());
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesResponseProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesResponseProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesResponseProto) obj;

      if (hasResourceProfiles() != other.hasResourceProfiles()) return false;
      if (hasResourceProfiles()) {
        if (!getResourceProfiles()
            .equals(other.getResourceProfiles())) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasResourceProfiles()) {
        hash = (37 * hash) + RESOURCE_PROFILES_FIELD_NUMBER;
        hash = (53 * hash) + getResourceProfiles().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesResponseProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesResponseProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesResponseProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesResponseProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesResponseProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesResponseProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesResponseProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesResponseProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesResponseProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.GetAllResourceProfilesResponseProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.GetAllResourceProfilesResponseProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesResponseProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetAllResourceProfilesResponseProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetAllResourceProfilesResponseProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesResponseProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesResponseProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
          getResourceProfilesFieldBuilder();
        }
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        resourceProfiles_ = null;
        if (resourceProfilesBuilder_ != null) {
          resourceProfilesBuilder_.dispose();
          resourceProfilesBuilder_ = null;
        }
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetAllResourceProfilesResponseProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesResponseProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesResponseProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesResponseProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesResponseProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesResponseProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesResponseProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesResponseProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesResponseProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.resourceProfiles_ = resourceProfilesBuilder_ == null
              ? resourceProfiles_
              : resourceProfilesBuilder_.build();
          to_bitField0_ |= 0x00000001;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesResponseProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesResponseProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesResponseProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesResponseProto.getDefaultInstance()) return this;
        if (other.hasResourceProfiles()) {
          mergeResourceProfiles(other.getResourceProfiles());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        if (!hasResourceProfiles()) {
          return false;
        }
        if (!getResourceProfiles().isInitialized()) {
          return false;
        }
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                input.readMessage(
                    getResourceProfilesFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
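
      /*
       * Editorial note (not generated by protoc): the case labels above follow
       * the protobuf wire format, tag = (field_number << 3) | wire_type.
       * Case 10 is (1 << 3) | 2: field 1 with length-delimited encoding, used
       * here for the embedded ResourceProfilesProto. A tag of 0 is never valid
       * and signals end of input, while unrecognized tags are routed through
       * parseUnknownField() so unknown fields are preserved, not dropped.
       */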
      private int bitField0_;

      private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto resourceProfiles_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProtoOrBuilder> resourceProfilesBuilder_;
      /**
       * <code>required .hadoop.yarn.ResourceProfilesProto resource_profiles = 1;</code>
       * @return Whether the resourceProfiles field is set.
       */
      public boolean hasResourceProfiles() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>required .hadoop.yarn.ResourceProfilesProto resource_profiles = 1;</code>
       * @return The resourceProfiles.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto getResourceProfiles() {
        if (resourceProfilesBuilder_ == null) {
          return resourceProfiles_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto.getDefaultInstance() : resourceProfiles_;
        } else {
          return resourceProfilesBuilder_.getMessage();
        }
      }
      /**
       * <code>required .hadoop.yarn.ResourceProfilesProto resource_profiles = 1;</code>
       */
      public Builder setResourceProfiles(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto value) {
        if (resourceProfilesBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          resourceProfiles_ = value;
        } else {
          resourceProfilesBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>required .hadoop.yarn.ResourceProfilesProto resource_profiles = 1;</code>
       */
      public Builder setResourceProfiles(
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto.Builder builderForValue) {
        if (resourceProfilesBuilder_ == null) {
          resourceProfiles_ = builderForValue.build();
        } else {
          resourceProfilesBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>required .hadoop.yarn.ResourceProfilesProto resource_profiles = 1;</code>
       */
      public Builder mergeResourceProfiles(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto value) {
        if (resourceProfilesBuilder_ == null) {
          if (((bitField0_ & 0x00000001) != 0) &&
            resourceProfiles_ != null &&
            resourceProfiles_ != org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto.getDefaultInstance()) {
            getResourceProfilesBuilder().mergeFrom(value);
          } else {
            resourceProfiles_ = value;
          }
        } else {
          resourceProfilesBuilder_.mergeFrom(value);
        }
        if (resourceProfiles_ != null) {
          bitField0_ |= 0x00000001;
          onChanged();
        }
        return this;
      }
      /**
       * <code>required .hadoop.yarn.ResourceProfilesProto resource_profiles = 1;</code>
       */
      public Builder clearResourceProfiles() {
        bitField0_ = (bitField0_ & ~0x00000001);
        resourceProfiles_ = null;
        if (resourceProfilesBuilder_ != null) {
          resourceProfilesBuilder_.dispose();
          resourceProfilesBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>required .hadoop.yarn.ResourceProfilesProto resource_profiles = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto.Builder getResourceProfilesBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getResourceProfilesFieldBuilder().getBuilder();
      }
      /**
       * <code>required .hadoop.yarn.ResourceProfilesProto resource_profiles = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProtoOrBuilder getResourceProfilesOrBuilder() {
        if (resourceProfilesBuilder_ != null) {
          return resourceProfilesBuilder_.getMessageOrBuilder();
        } else {
          return resourceProfiles_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto.getDefaultInstance() : resourceProfiles_;
        }
      }
      /**
       * <code>required .hadoop.yarn.ResourceProfilesProto resource_profiles = 1;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProtoOrBuilder> 
          getResourceProfilesFieldBuilder() {
        if (resourceProfilesBuilder_ == null) {
          resourceProfilesBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProtoOrBuilder>(
                  getResourceProfiles(),
                  getParentForChildren(),
                  isClean());
          resourceProfiles_ = null;
        }
        return resourceProfilesBuilder_;
      }
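
      /*
       * Editorial note (not generated by protoc): the SingleFieldBuilderV3 is
       * created lazily, so the plain setResourceProfiles()/getResourceProfiles()
       * path never allocates a sub-builder. Once created it becomes the single
       * source of truth and resourceProfiles_ is nulled out, which is why every
       * accessor above branches on resourceProfilesBuilder_ first.
       */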
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.GetAllResourceProfilesResponseProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.GetAllResourceProfilesResponseProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesResponseProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesResponseProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesResponseProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<GetAllResourceProfilesResponseProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<GetAllResourceProfilesResponseProto>() {
      @java.lang.Override
      public GetAllResourceProfilesResponseProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
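
    /*
     * Editorial note (not generated by protoc): parsePartialFrom() never
     * returns null; malformed or truncated input surfaces as an
     * InvalidProtocolBufferException carrying the partially decoded message
     * via setUnfinishedMessage(), so callers can inspect what was read before
     * the failure. The public PARSER field is deprecated; new code should
     * obtain the parser through parser() instead.
     */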

    public static org.apache.hadoop.thirdparty.protobuf.Parser<GetAllResourceProfilesResponseProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<GetAllResourceProfilesResponseProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesResponseProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
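
  /*
   * Editorial usage sketch (not generated by protoc): a minimal round trip
   * through GetAllResourceProfilesResponseProto. The `profiles` value below is
   * an assumed placeholder for a YarnProtos.ResourceProfilesProto obtained
   * elsewhere; everything else uses only methods defined above.
   *
   *   // resource_profiles is required, so build() throws
   *   // UninitializedMessageException if it was never set.
   *   YarnServiceProtos.GetAllResourceProfilesResponseProto response =
   *       YarnServiceProtos.GetAllResourceProfilesResponseProto.newBuilder()
   *           .setResourceProfiles(profiles)
   *           .build();
   *
   *   // Serialize and parse back; parseFrom() re-checks required fields and
   *   // throws InvalidProtocolBufferException on malformed input.
   *   byte[] wire = response.toByteArray();
   *   YarnServiceProtos.GetAllResourceProfilesResponseProto parsed =
   *       YarnServiceProtos.GetAllResourceProfilesResponseProto.parseFrom(wire);
   *   assert parsed.hasResourceProfiles();
   */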

  public interface GetResourceProfileRequestProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.GetResourceProfileRequestProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>required string profile = 1;</code>
     * @return Whether the profile field is set.
     */
    boolean hasProfile();
    /**
     * <code>required string profile = 1;</code>
     * @return The profile.
     */
    java.lang.String getProfile();
    /**
     * <code>required string profile = 1;</code>
     * @return The bytes for profile.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getProfileBytes();
  }
  /**
   * Protobuf type {@code hadoop.yarn.GetResourceProfileRequestProto}
   */
  public static final class GetResourceProfileRequestProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.GetResourceProfileRequestProto)
      GetResourceProfileRequestProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use GetResourceProfileRequestProto.newBuilder() to construct.
    private GetResourceProfileRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private GetResourceProfileRequestProto() {
      profile_ = "";
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new GetResourceProfileRequestProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetResourceProfileRequestProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetResourceProfileRequestProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileRequestProto.Builder.class);
    }

    private int bitField0_;
    public static final int PROFILE_FIELD_NUMBER = 1;
    @SuppressWarnings("serial")
    private volatile java.lang.Object profile_ = "";
    /**
     * <code>required string profile = 1;</code>
     * @return Whether the profile field is set.
     */
    @java.lang.Override
    public boolean hasProfile() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>required string profile = 1;</code>
     * @return The profile.
     */
    @java.lang.Override
    public java.lang.String getProfile() {
      java.lang.Object ref = profile_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          profile_ = s;
        }
        return s;
      }
    }
    /**
     * <code>required string profile = 1;</code>
     * @return The bytes for profile.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getProfileBytes() {
      java.lang.Object ref = profile_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        profile_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      if (!hasProfile()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, profile_);
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, profile_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileRequestProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileRequestProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileRequestProto) obj;

      if (hasProfile() != other.hasProfile()) return false;
      if (hasProfile()) {
        if (!getProfile()
            .equals(other.getProfile())) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasProfile()) {
        hash = (37 * hash) + PROFILE_FIELD_NUMBER;
        hash = (53 * hash) + getProfile().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileRequestProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileRequestProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileRequestProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileRequestProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileRequestProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileRequestProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileRequestProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileRequestProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileRequestProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.GetResourceProfileRequestProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.GetResourceProfileRequestProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileRequestProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetResourceProfileRequestProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetResourceProfileRequestProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileRequestProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileRequestProto.newBuilder()
      private Builder() {
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        profile_ = "";
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetResourceProfileRequestProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileRequestProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileRequestProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileRequestProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileRequestProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileRequestProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileRequestProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileRequestProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileRequestProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.profile_ = profile_;
          to_bitField0_ |= 0x00000001;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileRequestProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileRequestProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileRequestProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileRequestProto.getDefaultInstance()) return this;
        if (other.hasProfile()) {
          profile_ = other.profile_;
          bitField0_ |= 0x00000001;
          onChanged();
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        if (!hasProfile()) {
          return false;
        }
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                profile_ = input.readBytes();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
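
      /*
       * Editorial note (not generated by protoc): case 10 stores the raw
       * length-delimited bytes via readBytes() without UTF-8 validation; the
       * String conversion and caching happen lazily on the first getProfile()
       * call, mirroring the accessors below.
       */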
      private int bitField0_;

      private java.lang.Object profile_ = "";
      /**
       * <code>required string profile = 1;</code>
       * @return Whether the profile field is set.
       */
      public boolean hasProfile() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>required string profile = 1;</code>
       * @return The profile.
       */
      public java.lang.String getProfile() {
        java.lang.Object ref = profile_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            profile_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>required string profile = 1;</code>
       * @return The bytes for profile.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getProfileBytes() {
        java.lang.Object ref = profile_;
        if (ref instanceof java.lang.String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          profile_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>required string profile = 1;</code>
       * @param value The profile to set.
       * @return This builder for chaining.
       */
      public Builder setProfile(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        profile_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>required string profile = 1;</code>
       * @return This builder for chaining.
       */
      public Builder clearProfile() {
        profile_ = getDefaultInstance().getProfile();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
        return this;
      }
      /**
       * <code>required string profile = 1;</code>
       * @param value The bytes for profile to set.
       * @return This builder for chaining.
       */
      public Builder setProfileBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        profile_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.GetResourceProfileRequestProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.GetResourceProfileRequestProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileRequestProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileRequestProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileRequestProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<GetResourceProfileRequestProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<GetResourceProfileRequestProto>() {
      @java.lang.Override
      public GetResourceProfileRequestProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<GetResourceProfileRequestProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<GetResourceProfileRequestProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileRequestProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
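
  /*
   * Editorial usage sketch (not generated by protoc): the required-string
   * pattern of GetResourceProfileRequestProto. The profile name "default" is a
   * made-up placeholder.
   *
   *   YarnServiceProtos.GetResourceProfileRequestProto request =
   *       YarnServiceProtos.GetResourceProfileRequestProto.newBuilder()
   *           .setProfile("default")
   *           .build();
   *
   *   // getProfile() returns the cached String; getProfileBytes() converts to
   *   // UTF-8 once and caches the ByteString in place of the String.
   *   String name = request.getProfile();
   *
   *   // profile is required: a builder with nothing set produces an
   *   // uninitialized message, so build() would throw where buildPartial()
   *   // still succeeds.
   *   boolean ok = YarnServiceProtos.GetResourceProfileRequestProto
   *       .newBuilder().buildPartial().isInitialized();   // false
   */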

  public interface GetResourceProfileResponseProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.GetResourceProfileResponseProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>required .hadoop.yarn.ResourceProto resources = 1;</code>
     * @return Whether the resources field is set.
     */
    boolean hasResources();
    /**
     * <code>required .hadoop.yarn.ResourceProto resources = 1;</code>
     * @return The resources.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getResources();
    /**
     * <code>required .hadoop.yarn.ResourceProto resources = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getResourcesOrBuilder();
  }
  /**
   * Protobuf type {@code hadoop.yarn.GetResourceProfileResponseProto}
   */
  public static final class GetResourceProfileResponseProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.GetResourceProfileResponseProto)
      GetResourceProfileResponseProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use GetResourceProfileResponseProto.newBuilder() to construct.
    private GetResourceProfileResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private GetResourceProfileResponseProto() {
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new GetResourceProfileResponseProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetResourceProfileResponseProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetResourceProfileResponseProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileResponseProto.Builder.class);
    }

    private int bitField0_;
    public static final int RESOURCES_FIELD_NUMBER = 1;
    private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto resources_;
    /**
     * <code>required .hadoop.yarn.ResourceProto resources = 1;</code>
     * @return Whether the resources field is set.
     */
    @java.lang.Override
    public boolean hasResources() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>required .hadoop.yarn.ResourceProto resources = 1;</code>
     * @return The resources.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getResources() {
      return resources_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : resources_;
    }
    /**
     * <code>required .hadoop.yarn.ResourceProto resources = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getResourcesOrBuilder() {
      return resources_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : resources_;
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      if (!hasResources()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!getResources().isInitialized()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeMessage(1, getResources());
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, getResources());
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileResponseProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileResponseProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileResponseProto) obj;

      if (hasResources() != other.hasResources()) return false;
      if (hasResources()) {
        if (!getResources()
            .equals(other.getResources())) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasResources()) {
        hash = (37 * hash) + RESOURCES_FIELD_NUMBER;
        hash = (53 * hash) + getResources().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileResponseProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileResponseProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileResponseProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileResponseProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileResponseProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileResponseProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileResponseProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileResponseProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }
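    // Illustrative note (not generated by protoc): every parseFrom overload
    // above delegates to the same PARSER. A minimal round trip, assuming a
    // hypothetical byte[] `data` holding a serialized message:
    //
    //   org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileResponseProto resp =
    //       org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileResponseProto.parseFrom(data);
    //   byte[] roundTripped = resp.toByteArray();
    //
    // Because `resources` is a required field, parseFrom throws
    // InvalidProtocolBufferException if the input omits it.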

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileResponseProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
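    // Illustrative note (not generated by protoc): a typical construction
    // path through the Builder below, assuming a hypothetical
    // YarnProtos.ResourceProto `resource` built elsewhere:
    //
    //   GetResourceProfileResponseProto resp =
    //       GetResourceProfileResponseProto.newBuilder()
    //           .setResources(resource)
    //           .build();
    //
    // build() throws if the required `resources` field was never set;
    // buildPartial() skips that check.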
    /**
     * Protobuf type {@code hadoop.yarn.GetResourceProfileResponseProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.GetResourceProfileResponseProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileResponseProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetResourceProfileResponseProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetResourceProfileResponseProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileResponseProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileResponseProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
          getResourcesFieldBuilder();
        }
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        resources_ = null;
        if (resourcesBuilder_ != null) {
          resourcesBuilder_.dispose();
          resourcesBuilder_ = null;
        }
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetResourceProfileResponseProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileResponseProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileResponseProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileResponseProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileResponseProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileResponseProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileResponseProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileResponseProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileResponseProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.resources_ = resourcesBuilder_ == null
              ? resources_
              : resourcesBuilder_.build();
          to_bitField0_ |= 0x00000001;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileResponseProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileResponseProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileResponseProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileResponseProto.getDefaultInstance()) return this;
        if (other.hasResources()) {
          mergeResources(other.getResources());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        if (!hasResources()) {
          return false;
        }
        if (!getResources().isInitialized()) {
          return false;
        }
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                input.readMessage(
                    getResourcesFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
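      // Illustrative note (not generated by protoc): the case labels above
      // are precomputed wire tags, tag = (field_number << 3) | wire_type.
      // Field 1 with wire type 2 (length-delimited) yields (1 << 3) | 2 = 10,
      // which is why the `resources` submessage is handled at `case 10`.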
      private int bitField0_;

      private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto resources_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> resourcesBuilder_;
      /**
       * <code>required .hadoop.yarn.ResourceProto resources = 1;</code>
       * @return Whether the resources field is set.
       */
      public boolean hasResources() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>required .hadoop.yarn.ResourceProto resources = 1;</code>
       * @return The resources.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getResources() {
        if (resourcesBuilder_ == null) {
          return resources_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : resources_;
        } else {
          return resourcesBuilder_.getMessage();
        }
      }
      /**
       * <code>required .hadoop.yarn.ResourceProto resources = 1;</code>
       */
      public Builder setResources(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) {
        if (resourcesBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          resources_ = value;
        } else {
          resourcesBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>required .hadoop.yarn.ResourceProto resources = 1;</code>
       */
      public Builder setResources(
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder builderForValue) {
        if (resourcesBuilder_ == null) {
          resources_ = builderForValue.build();
        } else {
          resourcesBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>required .hadoop.yarn.ResourceProto resources = 1;</code>
       */
      public Builder mergeResources(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) {
        if (resourcesBuilder_ == null) {
          if (((bitField0_ & 0x00000001) != 0) &&
            resources_ != null &&
            resources_ != org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance()) {
            getResourcesBuilder().mergeFrom(value);
          } else {
            resources_ = value;
          }
        } else {
          resourcesBuilder_.mergeFrom(value);
        }
        if (resources_ != null) {
          bitField0_ |= 0x00000001;
          onChanged();
        }
        return this;
      }
      /**
       * <code>required .hadoop.yarn.ResourceProto resources = 1;</code>
       */
      public Builder clearResources() {
        bitField0_ = (bitField0_ & ~0x00000001);
        resources_ = null;
        if (resourcesBuilder_ != null) {
          resourcesBuilder_.dispose();
          resourcesBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>required .hadoop.yarn.ResourceProto resources = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder getResourcesBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getResourcesFieldBuilder().getBuilder();
      }
      /**
       * <code>required .hadoop.yarn.ResourceProto resources = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getResourcesOrBuilder() {
        if (resourcesBuilder_ != null) {
          return resourcesBuilder_.getMessageOrBuilder();
        } else {
          return resources_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : resources_;
        }
      }
      /**
       * <code>required .hadoop.yarn.ResourceProto resources = 1;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> 
          getResourcesFieldBuilder() {
        if (resourcesBuilder_ == null) {
          resourcesBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder>(
                  getResources(),
                  getParentForChildren(),
                  isClean());
          resources_ = null;
        }
        return resourcesBuilder_;
      }
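      // Illustrative note (not generated by protoc): the SingleFieldBuilderV3
      // is created lazily on first access. Once it exists, `resources_` is
      // nulled and the builder becomes the single source of truth, which is
      // why every accessor above branches on `resourcesBuilder_ == null`.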
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.GetResourceProfileResponseProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.GetResourceProfileResponseProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileResponseProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileResponseProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileResponseProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<GetResourceProfileResponseProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<GetResourceProfileResponseProto>() {
      @java.lang.Override
      public GetResourceProfileResponseProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
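    // Illustrative note (not generated by protoc): the public PARSER field is
    // retained for backward compatibility but deprecated; callers are
    // expected to go through parser() or the static parseFrom overloads.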

    public static org.apache.hadoop.thirdparty.protobuf.Parser<GetResourceProfileResponseProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<GetResourceProfileResponseProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileResponseProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }

  public interface GetAllResourceTypeInfoRequestProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.GetAllResourceTypeInfoRequestProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
  }
  /**
   * Protobuf type {@code hadoop.yarn.GetAllResourceTypeInfoRequestProto}
   */
  public static final class GetAllResourceTypeInfoRequestProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.GetAllResourceTypeInfoRequestProto)
      GetAllResourceTypeInfoRequestProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use GetAllResourceTypeInfoRequestProto.newBuilder() to construct.
    private GetAllResourceTypeInfoRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private GetAllResourceTypeInfoRequestProto() {
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new GetAllResourceTypeInfoRequestProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetAllResourceTypeInfoRequestProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetAllResourceTypeInfoRequestProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoRequestProto.Builder.class);
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }
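    // Illustrative note (not generated by protoc): this request message
    // declares no fields, so its serialized form is empty apart from any
    // unknown fields it carries. For example:
    //
    //   GetAllResourceTypeInfoRequestProto req =
    //       GetAllResourceTypeInfoRequestProto.getDefaultInstance();
    //   // req.toByteArray().length == 0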

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoRequestProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoRequestProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoRequestProto) obj;

      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoRequestProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoRequestProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoRequestProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoRequestProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoRequestProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoRequestProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoRequestProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoRequestProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoRequestProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.GetAllResourceTypeInfoRequestProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.GetAllResourceTypeInfoRequestProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoRequestProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetAllResourceTypeInfoRequestProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetAllResourceTypeInfoRequestProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoRequestProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoRequestProto.newBuilder()
      private Builder() {
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetAllResourceTypeInfoRequestProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoRequestProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoRequestProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoRequestProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoRequestProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoRequestProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoRequestProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoRequestProto(this);
        onBuilt();
        return result;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoRequestProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoRequestProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoRequestProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoRequestProto.getDefaultInstance()) return this;
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.GetAllResourceTypeInfoRequestProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.GetAllResourceTypeInfoRequestProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoRequestProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoRequestProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoRequestProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<GetAllResourceTypeInfoRequestProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<GetAllResourceTypeInfoRequestProto>() {
      @java.lang.Override
      public GetAllResourceTypeInfoRequestProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<GetAllResourceTypeInfoRequestProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<GetAllResourceTypeInfoRequestProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoRequestProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }

  public interface GetAllResourceTypeInfoResponseProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.GetAllResourceTypeInfoResponseProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>repeated .hadoop.yarn.ResourceTypeInfoProto resource_type_info = 1;</code>
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto> 
        getResourceTypeInfoList();
    /**
     * <code>repeated .hadoop.yarn.ResourceTypeInfoProto resource_type_info = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto getResourceTypeInfo(int index);
    /**
     * <code>repeated .hadoop.yarn.ResourceTypeInfoProto resource_type_info = 1;</code>
     */
    int getResourceTypeInfoCount();
    /**
     * <code>repeated .hadoop.yarn.ResourceTypeInfoProto resource_type_info = 1;</code>
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProtoOrBuilder> 
        getResourceTypeInfoOrBuilderList();
    /**
     * <code>repeated .hadoop.yarn.ResourceTypeInfoProto resource_type_info = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProtoOrBuilder getResourceTypeInfoOrBuilder(
        int index);
  }
  /**
   * Protobuf type {@code hadoop.yarn.GetAllResourceTypeInfoResponseProto}
   */
  public static final class GetAllResourceTypeInfoResponseProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.GetAllResourceTypeInfoResponseProto)
      GetAllResourceTypeInfoResponseProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use GetAllResourceTypeInfoResponseProto.newBuilder() to construct.
    private GetAllResourceTypeInfoResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private GetAllResourceTypeInfoResponseProto() {
      resourceTypeInfo_ = java.util.Collections.emptyList();
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new GetAllResourceTypeInfoResponseProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetAllResourceTypeInfoResponseProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetAllResourceTypeInfoResponseProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoResponseProto.Builder.class);
    }

    public static final int RESOURCE_TYPE_INFO_FIELD_NUMBER = 1;
    @SuppressWarnings("serial")
    private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto> resourceTypeInfo_;
    /**
     * <code>repeated .hadoop.yarn.ResourceTypeInfoProto resource_type_info = 1;</code>
     */
    @java.lang.Override
    public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto> getResourceTypeInfoList() {
      return resourceTypeInfo_;
    }
    /**
     * <code>repeated .hadoop.yarn.ResourceTypeInfoProto resource_type_info = 1;</code>
     */
    @java.lang.Override
    public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProtoOrBuilder> 
        getResourceTypeInfoOrBuilderList() {
      return resourceTypeInfo_;
    }
    /**
     * <code>repeated .hadoop.yarn.ResourceTypeInfoProto resource_type_info = 1;</code>
     */
    @java.lang.Override
    public int getResourceTypeInfoCount() {
      return resourceTypeInfo_.size();
    }
    /**
     * <code>repeated .hadoop.yarn.ResourceTypeInfoProto resource_type_info = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto getResourceTypeInfo(int index) {
      return resourceTypeInfo_.get(index);
    }
    /**
     * <code>repeated .hadoop.yarn.ResourceTypeInfoProto resource_type_info = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProtoOrBuilder getResourceTypeInfoOrBuilder(
        int index) {
      return resourceTypeInfo_.get(index);
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      for (int i = 0; i < getResourceTypeInfoCount(); i++) {
        if (!getResourceTypeInfo(i).isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }
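    // Illustrative note (not generated by protoc): protoc emits this
    // per-element check only when the element type can itself be
    // uninitialized (ResourceTypeInfoProto declares a required field). The
    // result is memoized: -1 = unknown, 0 = not initialized, 1 = initialized.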

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      for (int i = 0; i < resourceTypeInfo_.size(); i++) {
        output.writeMessage(1, resourceTypeInfo_.get(i));
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      for (int i = 0; i < resourceTypeInfo_.size(); i++) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, resourceTypeInfo_.get(i));
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }
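    // Illustrative note (not generated by protoc): computeMessageSize(1, m)
    // covers the field-1 tag byte, the varint-encoded length of m, and m's
    // payload, so the total is summed over all repeated elements plus any
    // unknown fields, then cached in memoizedSize.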

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoResponseProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoResponseProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoResponseProto) obj;

      if (!getResourceTypeInfoList()
          .equals(other.getResourceTypeInfoList())) return false;
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (getResourceTypeInfoCount() > 0) {
        hash = (37 * hash) + RESOURCE_TYPE_INFO_FIELD_NUMBER;
        hash = (53 * hash) + getResourceTypeInfoList().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoResponseProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoResponseProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoResponseProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoResponseProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoResponseProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoResponseProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoResponseProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoResponseProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoResponseProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.GetAllResourceTypeInfoResponseProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.GetAllResourceTypeInfoResponseProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoResponseProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetAllResourceTypeInfoResponseProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetAllResourceTypeInfoResponseProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoResponseProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoResponseProto.newBuilder()
      private Builder() {
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        if (resourceTypeInfoBuilder_ == null) {
          resourceTypeInfo_ = java.util.Collections.emptyList();
        } else {
          resourceTypeInfo_ = null;
          resourceTypeInfoBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetAllResourceTypeInfoResponseProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoResponseProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoResponseProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoResponseProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoResponseProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoResponseProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoResponseProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoResponseProto(this);
        buildPartialRepeatedFields(result);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartialRepeatedFields(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoResponseProto result) {
        if (resourceTypeInfoBuilder_ == null) {
          if (((bitField0_ & 0x00000001) != 0)) {
            resourceTypeInfo_ = java.util.Collections.unmodifiableList(resourceTypeInfo_);
            bitField0_ = (bitField0_ & ~0x00000001);
          }
          result.resourceTypeInfo_ = resourceTypeInfo_;
        } else {
          result.resourceTypeInfo_ = resourceTypeInfoBuilder_.build();
        }
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoResponseProto result) {
        // No singular fields to copy: the message's only field is repeated
        // and is handled in buildPartialRepeatedFields above.
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoResponseProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoResponseProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoResponseProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoResponseProto.getDefaultInstance()) return this;
        if (resourceTypeInfoBuilder_ == null) {
          if (!other.resourceTypeInfo_.isEmpty()) {
            if (resourceTypeInfo_.isEmpty()) {
              resourceTypeInfo_ = other.resourceTypeInfo_;
              bitField0_ = (bitField0_ & ~0x00000001);
            } else {
              ensureResourceTypeInfoIsMutable();
              resourceTypeInfo_.addAll(other.resourceTypeInfo_);
            }
            onChanged();
          }
        } else {
          if (!other.resourceTypeInfo_.isEmpty()) {
            if (resourceTypeInfoBuilder_.isEmpty()) {
              resourceTypeInfoBuilder_.dispose();
              resourceTypeInfoBuilder_ = null;
              resourceTypeInfo_ = other.resourceTypeInfo_;
              bitField0_ = (bitField0_ & ~0x00000001);
              resourceTypeInfoBuilder_ = 
                org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                   getResourceTypeInfoFieldBuilder() : null;
            } else {
              resourceTypeInfoBuilder_.addAllMessages(other.resourceTypeInfo_);
            }
          }
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }
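      // Illustrative note (not generated by protoc): per protobuf merge
      // semantics, repeated fields are appended rather than replaced; the
      // empty-destination branch above adopts `other`'s immutable list
      // directly to avoid copying.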

      @java.lang.Override
      public final boolean isInitialized() {
        for (int i = 0; i < getResourceTypeInfoCount(); i++) {
          if (!getResourceTypeInfo(i).isInitialized()) {
            return false;
          }
        }
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto m =
                    input.readMessage(
                        org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto.PARSER,
                        extensionRegistry);
                if (resourceTypeInfoBuilder_ == null) {
                  ensureResourceTypeInfoIsMutable();
                  resourceTypeInfo_.add(m);
                } else {
                  resourceTypeInfoBuilder_.addMessage(m);
                }
                break;
              } // case 10
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

      private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto> resourceTypeInfo_ =
        java.util.Collections.emptyList();
      private void ensureResourceTypeInfoIsMutable() {
        if (!((bitField0_ & 0x00000001) != 0)) {
          resourceTypeInfo_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto>(resourceTypeInfo_);
          bitField0_ |= 0x00000001;
        }
      }
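      // Illustrative note (not generated by protoc): bit 0x00000001 marks
      // that this builder owns a private, mutable copy of the list. Until a
      // mutation occurs, the builder may share an immutable list with the
      // message it was created from, so every mutator below calls
      // ensureResourceTypeInfoIsMutable() first.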

      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProtoOrBuilder> resourceTypeInfoBuilder_;

      /**
       * <code>repeated .hadoop.yarn.ResourceTypeInfoProto resource_type_info = 1;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto> getResourceTypeInfoList() {
        if (resourceTypeInfoBuilder_ == null) {
          return java.util.Collections.unmodifiableList(resourceTypeInfo_);
        } else {
          return resourceTypeInfoBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceTypeInfoProto resource_type_info = 1;</code>
       */
      public int getResourceTypeInfoCount() {
        if (resourceTypeInfoBuilder_ == null) {
          return resourceTypeInfo_.size();
        } else {
          return resourceTypeInfoBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceTypeInfoProto resource_type_info = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto getResourceTypeInfo(int index) {
        if (resourceTypeInfoBuilder_ == null) {
          return resourceTypeInfo_.get(index);
        } else {
          return resourceTypeInfoBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceTypeInfoProto resource_type_info = 1;</code>
       */
      public Builder setResourceTypeInfo(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto value) {
        if (resourceTypeInfoBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureResourceTypeInfoIsMutable();
          resourceTypeInfo_.set(index, value);
          onChanged();
        } else {
          resourceTypeInfoBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceTypeInfoProto resource_type_info = 1;</code>
       */
      public Builder setResourceTypeInfo(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto.Builder builderForValue) {
        if (resourceTypeInfoBuilder_ == null) {
          ensureResourceTypeInfoIsMutable();
          resourceTypeInfo_.set(index, builderForValue.build());
          onChanged();
        } else {
          resourceTypeInfoBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceTypeInfoProto resource_type_info = 1;</code>
       */
      public Builder addResourceTypeInfo(org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto value) {
        if (resourceTypeInfoBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureResourceTypeInfoIsMutable();
          resourceTypeInfo_.add(value);
          onChanged();
        } else {
          resourceTypeInfoBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceTypeInfoProto resource_type_info = 1;</code>
       */
      public Builder addResourceTypeInfo(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto value) {
        if (resourceTypeInfoBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureResourceTypeInfoIsMutable();
          resourceTypeInfo_.add(index, value);
          onChanged();
        } else {
          resourceTypeInfoBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceTypeInfoProto resource_type_info = 1;</code>
       */
      public Builder addResourceTypeInfo(
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto.Builder builderForValue) {
        if (resourceTypeInfoBuilder_ == null) {
          ensureResourceTypeInfoIsMutable();
          resourceTypeInfo_.add(builderForValue.build());
          onChanged();
        } else {
          resourceTypeInfoBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceTypeInfoProto resource_type_info = 1;</code>
       */
      public Builder addResourceTypeInfo(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto.Builder builderForValue) {
        if (resourceTypeInfoBuilder_ == null) {
          ensureResourceTypeInfoIsMutable();
          resourceTypeInfo_.add(index, builderForValue.build());
          onChanged();
        } else {
          resourceTypeInfoBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceTypeInfoProto resource_type_info = 1;</code>
       */
      public Builder addAllResourceTypeInfo(
          java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto> values) {
        if (resourceTypeInfoBuilder_ == null) {
          ensureResourceTypeInfoIsMutable();
          org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
              values, resourceTypeInfo_);
          onChanged();
        } else {
          resourceTypeInfoBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceTypeInfoProto resource_type_info = 1;</code>
       */
      public Builder clearResourceTypeInfo() {
        if (resourceTypeInfoBuilder_ == null) {
          resourceTypeInfo_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000001);
          onChanged();
        } else {
          resourceTypeInfoBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceTypeInfoProto resource_type_info = 1;</code>
       */
      public Builder removeResourceTypeInfo(int index) {
        if (resourceTypeInfoBuilder_ == null) {
          ensureResourceTypeInfoIsMutable();
          resourceTypeInfo_.remove(index);
          onChanged();
        } else {
          resourceTypeInfoBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceTypeInfoProto resource_type_info = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto.Builder getResourceTypeInfoBuilder(
          int index) {
        return getResourceTypeInfoFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceTypeInfoProto resource_type_info = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProtoOrBuilder getResourceTypeInfoOrBuilder(
          int index) {
        if (resourceTypeInfoBuilder_ == null) {
          return resourceTypeInfo_.get(index);
        } else {
          return resourceTypeInfoBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceTypeInfoProto resource_type_info = 1;</code>
       */
      public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProtoOrBuilder> 
           getResourceTypeInfoOrBuilderList() {
        if (resourceTypeInfoBuilder_ != null) {
          return resourceTypeInfoBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(resourceTypeInfo_);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceTypeInfoProto resource_type_info = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto.Builder addResourceTypeInfoBuilder() {
        return getResourceTypeInfoFieldBuilder().addBuilder(
            org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceTypeInfoProto resource_type_info = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto.Builder addResourceTypeInfoBuilder(
          int index) {
        return getResourceTypeInfoFieldBuilder().addBuilder(
            index, org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceTypeInfoProto resource_type_info = 1;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto.Builder> 
           getResourceTypeInfoBuilderList() {
        return getResourceTypeInfoFieldBuilder().getBuilderList();
      }
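      // Lazily replaces the plain list with a RepeatedFieldBuilderV3 so that
      // per-element builders can be handed out; the raw list reference is then
      // cleared so there is a single source of truth.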
      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProtoOrBuilder> 
          getResourceTypeInfoFieldBuilder() {
        if (resourceTypeInfoBuilder_ == null) {
          resourceTypeInfoBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProtoOrBuilder>(
                  resourceTypeInfo_,
                  ((bitField0_ & 0x00000001) != 0),
                  getParentForChildren(),
                  isClean());
          resourceTypeInfo_ = null;
        }
        return resourceTypeInfoBuilder_;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.GetAllResourceTypeInfoResponseProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.GetAllResourceTypeInfoResponseProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoResponseProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoResponseProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoResponseProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<GetAllResourceTypeInfoResponseProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<GetAllResourceTypeInfoResponseProto>() {
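      // Any parse failure attaches the partially built message to the thrown
      // exception via setUnfinishedMessage, so callers can inspect what was read.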
      @java.lang.Override
      public GetAllResourceTypeInfoResponseProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<GetAllResourceTypeInfoResponseProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<GetAllResourceTypeInfoResponseProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoResponseProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }

  public interface StartContainerRequestProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.StartContainerRequestProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>optional .hadoop.yarn.ContainerLaunchContextProto container_launch_context = 1;</code>
     * @return Whether the containerLaunchContext field is set.
     */
    boolean hasContainerLaunchContext();
    /**
     * <code>optional .hadoop.yarn.ContainerLaunchContextProto container_launch_context = 1;</code>
     * @return The containerLaunchContext.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto getContainerLaunchContext();
    /**
     * <code>optional .hadoop.yarn.ContainerLaunchContextProto container_launch_context = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProtoOrBuilder getContainerLaunchContextOrBuilder();

    /**
     * <code>optional .hadoop.common.TokenProto container_token = 2;</code>
     * @return Whether the containerToken field is set.
     */
    boolean hasContainerToken();
    /**
     * <code>optional .hadoop.common.TokenProto container_token = 2;</code>
     * @return The containerToken.
     */
    org.apache.hadoop.security.proto.SecurityProtos.TokenProto getContainerToken();
    /**
     * <code>optional .hadoop.common.TokenProto container_token = 2;</code>
     */
    org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder getContainerTokenOrBuilder();
  }
  /**
   * Protobuf type {@code hadoop.yarn.StartContainerRequestProto}
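   *
   * <p>A minimal construction sketch (illustrative only; {@code launchContext}
   * and {@code token} stand for pre-built values that are not defined in this
   * file):
   * <pre>{@code
   * StartContainerRequestProto request = StartContainerRequestProto.newBuilder()
   *     .setContainerLaunchContext(launchContext)
   *     .setContainerToken(token)
   *     .build();
   * }</pre>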
   */
  public static final class StartContainerRequestProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.StartContainerRequestProto)
      StartContainerRequestProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use StartContainerRequestProto.newBuilder() to construct.
    private StartContainerRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private StartContainerRequestProto() {
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new StartContainerRequestProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_StartContainerRequestProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_StartContainerRequestProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto.Builder.class);
    }

    private int bitField0_;
    public static final int CONTAINER_LAUNCH_CONTEXT_FIELD_NUMBER = 1;
    private org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto containerLaunchContext_;
    /**
     * <code>optional .hadoop.yarn.ContainerLaunchContextProto container_launch_context = 1;</code>
     * @return Whether the containerLaunchContext field is set.
     */
    @java.lang.Override
    public boolean hasContainerLaunchContext() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.ContainerLaunchContextProto container_launch_context = 1;</code>
     * @return The containerLaunchContext.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto getContainerLaunchContext() {
      return containerLaunchContext_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto.getDefaultInstance() : containerLaunchContext_;
    }
    /**
     * <code>optional .hadoop.yarn.ContainerLaunchContextProto container_launch_context = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProtoOrBuilder getContainerLaunchContextOrBuilder() {
      return containerLaunchContext_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto.getDefaultInstance() : containerLaunchContext_;
    }

    public static final int CONTAINER_TOKEN_FIELD_NUMBER = 2;
    private org.apache.hadoop.security.proto.SecurityProtos.TokenProto containerToken_;
    /**
     * <code>optional .hadoop.common.TokenProto container_token = 2;</code>
     * @return Whether the containerToken field is set.
     */
    @java.lang.Override
    public boolean hasContainerToken() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <code>optional .hadoop.common.TokenProto container_token = 2;</code>
     * @return The containerToken.
     */
    @java.lang.Override
    public org.apache.hadoop.security.proto.SecurityProtos.TokenProto getContainerToken() {
      return containerToken_ == null ? org.apache.hadoop.security.proto.SecurityProtos.TokenProto.getDefaultInstance() : containerToken_;
    }
    /**
     * <code>optional .hadoop.common.TokenProto container_token = 2;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder getContainerTokenOrBuilder() {
      return containerToken_ == null ? org.apache.hadoop.security.proto.SecurityProtos.TokenProto.getDefaultInstance() : containerToken_;
    }

    private byte memoizedIsInitialized = -1; // -1 = not computed, 0 = invalid, 1 = valid
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      if (hasContainerToken()) {
        if (!getContainerToken().isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeMessage(1, getContainerLaunchContext());
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        output.writeMessage(2, getContainerToken());
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, getContainerLaunchContext());
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(2, getContainerToken());
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto) obj;

      if (hasContainerLaunchContext() != other.hasContainerLaunchContext()) return false;
      if (hasContainerLaunchContext()) {
        if (!getContainerLaunchContext()
            .equals(other.getContainerLaunchContext())) return false;
      }
      if (hasContainerToken() != other.hasContainerToken()) return false;
      if (hasContainerToken()) {
        if (!getContainerToken()
            .equals(other.getContainerToken())) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasContainerLaunchContext()) {
        hash = (37 * hash) + CONTAINER_LAUNCH_CONTEXT_FIELD_NUMBER;
        hash = (53 * hash) + getContainerLaunchContext().hashCode();
      }
      if (hasContainerToken()) {
        hash = (37 * hash) + CONTAINER_TOKEN_FIELD_NUMBER;
        hash = (53 * hash) + getContainerToken().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.StartContainerRequestProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.StartContainerRequestProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_StartContainerRequestProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_StartContainerRequestProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
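      // alwaysUseFieldBuilders is false in normal operation; the protobuf
      // runtime enables it only for its own tests, forcing eager creation of
      // nested field builders.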
      private void maybeForceBuilderInitialization() {
        if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
          getContainerLaunchContextFieldBuilder();
          getContainerTokenFieldBuilder();
        }
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        containerLaunchContext_ = null;
        if (containerLaunchContextBuilder_ != null) {
          containerLaunchContextBuilder_.dispose();
          containerLaunchContextBuilder_ = null;
        }
        containerToken_ = null;
        if (containerTokenBuilder_ != null) {
          containerTokenBuilder_.dispose();
          containerTokenBuilder_ = null;
        }
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_StartContainerRequestProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto result) {
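        // Copy only the fields whose presence bits are set locally, then OR
        // those bits into the result's bitField0_ in a single write.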
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.containerLaunchContext_ = containerLaunchContextBuilder_ == null
              ? containerLaunchContext_
              : containerLaunchContextBuilder_.build();
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.containerToken_ = containerTokenBuilder_ == null
              ? containerToken_
              : containerTokenBuilder_.build();
          to_bitField0_ |= 0x00000002;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto.getDefaultInstance()) return this;
        if (other.hasContainerLaunchContext()) {
          mergeContainerLaunchContext(other.getContainerLaunchContext());
        }
        if (other.hasContainerToken()) {
          mergeContainerToken(other.getContainerToken());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        if (hasContainerToken()) {
          if (!getContainerToken().isInitialized()) {
            return false;
          }
        }
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
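              // Wire tags are (field_number << 3) | wire_type; wire type 2 is
              // length-delimited, so tag 10 is field 1 and tag 18 is field 2.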
              case 10: {
                input.readMessage(
                    getContainerLaunchContextFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
              case 18: {
                input.readMessage(
                    getContainerTokenFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

      private org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto containerLaunchContext_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProtoOrBuilder> containerLaunchContextBuilder_;
      /**
       * <code>optional .hadoop.yarn.ContainerLaunchContextProto container_launch_context = 1;</code>
       * @return Whether the containerLaunchContext field is set.
       */
      public boolean hasContainerLaunchContext() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.ContainerLaunchContextProto container_launch_context = 1;</code>
       * @return The containerLaunchContext.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto getContainerLaunchContext() {
        if (containerLaunchContextBuilder_ == null) {
          return containerLaunchContext_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto.getDefaultInstance() : containerLaunchContext_;
        } else {
          return containerLaunchContextBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.ContainerLaunchContextProto container_launch_context = 1;</code>
       */
      public Builder setContainerLaunchContext(org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto value) {
        if (containerLaunchContextBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          containerLaunchContext_ = value;
        } else {
          containerLaunchContextBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ContainerLaunchContextProto container_launch_context = 1;</code>
       */
      public Builder setContainerLaunchContext(
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto.Builder builderForValue) {
        if (containerLaunchContextBuilder_ == null) {
          containerLaunchContext_ = builderForValue.build();
        } else {
          containerLaunchContextBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ContainerLaunchContextProto container_launch_context = 1;</code>
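       *
       * <p>If the field is already set to a non-default message, the two
       * values are merged field by field; otherwise {@code value} is installed
       * as-is.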
       */
      public Builder mergeContainerLaunchContext(org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto value) {
        if (containerLaunchContextBuilder_ == null) {
          if (((bitField0_ & 0x00000001) != 0) &&
            containerLaunchContext_ != null &&
            containerLaunchContext_ != org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto.getDefaultInstance()) {
            getContainerLaunchContextBuilder().mergeFrom(value);
          } else {
            containerLaunchContext_ = value;
          }
        } else {
          containerLaunchContextBuilder_.mergeFrom(value);
        }
        if (containerLaunchContext_ != null) {
          bitField0_ |= 0x00000001;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ContainerLaunchContextProto container_launch_context = 1;</code>
       */
      public Builder clearContainerLaunchContext() {
        bitField0_ = (bitField0_ & ~0x00000001);
        containerLaunchContext_ = null;
        if (containerLaunchContextBuilder_ != null) {
          containerLaunchContextBuilder_.dispose();
          containerLaunchContextBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ContainerLaunchContextProto container_launch_context = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto.Builder getContainerLaunchContextBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getContainerLaunchContextFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.ContainerLaunchContextProto container_launch_context = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProtoOrBuilder getContainerLaunchContextOrBuilder() {
        if (containerLaunchContextBuilder_ != null) {
          return containerLaunchContextBuilder_.getMessageOrBuilder();
        } else {
          return containerLaunchContext_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto.getDefaultInstance() : containerLaunchContext_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.ContainerLaunchContextProto container_launch_context = 1;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProtoOrBuilder> 
          getContainerLaunchContextFieldBuilder() {
        if (containerLaunchContextBuilder_ == null) {
          containerLaunchContextBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProtoOrBuilder>(
                  getContainerLaunchContext(),
                  getParentForChildren(),
                  isClean());
          containerLaunchContext_ = null;
        }
        return containerLaunchContextBuilder_;
      }

      private org.apache.hadoop.security.proto.SecurityProtos.TokenProto containerToken_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.security.proto.SecurityProtos.TokenProto, org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder, org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder> containerTokenBuilder_;
      /**
       * <code>optional .hadoop.common.TokenProto container_token = 2;</code>
       * @return Whether the containerToken field is set.
       */
      public boolean hasContainerToken() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * <code>optional .hadoop.common.TokenProto container_token = 2;</code>
       * @return The containerToken.
       */
      public org.apache.hadoop.security.proto.SecurityProtos.TokenProto getContainerToken() {
        if (containerTokenBuilder_ == null) {
          return containerToken_ == null ? org.apache.hadoop.security.proto.SecurityProtos.TokenProto.getDefaultInstance() : containerToken_;
        } else {
          return containerTokenBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.common.TokenProto container_token = 2;</code>
       */
      public Builder setContainerToken(org.apache.hadoop.security.proto.SecurityProtos.TokenProto value) {
        if (containerTokenBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          containerToken_ = value;
        } else {
          containerTokenBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.common.TokenProto container_token = 2;</code>
       */
      public Builder setContainerToken(
          org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder builderForValue) {
        if (containerTokenBuilder_ == null) {
          containerToken_ = builderForValue.build();
        } else {
          containerTokenBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.common.TokenProto container_token = 2;</code>
       */
      public Builder mergeContainerToken(org.apache.hadoop.security.proto.SecurityProtos.TokenProto value) {
        if (containerTokenBuilder_ == null) {
          if (((bitField0_ & 0x00000002) != 0) &&
            containerToken_ != null &&
            containerToken_ != org.apache.hadoop.security.proto.SecurityProtos.TokenProto.getDefaultInstance()) {
            getContainerTokenBuilder().mergeFrom(value);
          } else {
            containerToken_ = value;
          }
        } else {
          containerTokenBuilder_.mergeFrom(value);
        }
        if (containerToken_ != null) {
          bitField0_ |= 0x00000002;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.common.TokenProto container_token = 2;</code>
       */
      public Builder clearContainerToken() {
        bitField0_ = (bitField0_ & ~0x00000002);
        containerToken_ = null;
        if (containerTokenBuilder_ != null) {
          containerTokenBuilder_.dispose();
          containerTokenBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.common.TokenProto container_token = 2;</code>
       */
      public org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder getContainerTokenBuilder() {
        bitField0_ |= 0x00000002;
        onChanged();
        return getContainerTokenFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.common.TokenProto container_token = 2;</code>
       */
      public org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder getContainerTokenOrBuilder() {
        if (containerTokenBuilder_ != null) {
          return containerTokenBuilder_.getMessageOrBuilder();
        } else {
          return containerToken_ == null ?
              org.apache.hadoop.security.proto.SecurityProtos.TokenProto.getDefaultInstance() : containerToken_;
        }
      }
      /**
       * <code>optional .hadoop.common.TokenProto container_token = 2;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.security.proto.SecurityProtos.TokenProto, org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder, org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder> 
          getContainerTokenFieldBuilder() {
        if (containerTokenBuilder_ == null) {
          containerTokenBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.security.proto.SecurityProtos.TokenProto, org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder, org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder>(
                  getContainerToken(),
                  getParentForChildren(),
                  isClean());
          containerToken_ = null;
        }
        return containerTokenBuilder_;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.StartContainerRequestProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.StartContainerRequestProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<StartContainerRequestProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<StartContainerRequestProto>() {
      @java.lang.Override
      public StartContainerRequestProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<StartContainerRequestProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<StartContainerRequestProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }

  public interface StartContainerResponseProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.StartContainerResponseProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1;</code>
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto> 
        getServicesMetaDataList();
    /**
     * <code>repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto getServicesMetaData(int index);
    /**
     * <code>repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1;</code>
     */
    int getServicesMetaDataCount();
    /**
     * <code>repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1;</code>
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProtoOrBuilder> 
        getServicesMetaDataOrBuilderList();
    /**
     * <code>repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProtoOrBuilder getServicesMetaDataOrBuilder(
        int index);
  }
  /**
   * Protobuf type {@code hadoop.yarn.StartContainerResponseProto}
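   *
   * <p>A reading sketch (illustrative; {@code bytes} stands for an assumed
   * serialized response, not something defined here):
   * <pre>{@code
   * StartContainerResponseProto response =
   *     StartContainerResponseProto.parseFrom(bytes);
   * int entries = response.getServicesMetaDataCount();
   * }</pre>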
   */
  public static final class StartContainerResponseProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.StartContainerResponseProto)
      StartContainerResponseProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use StartContainerResponseProto.newBuilder() to construct.
    private StartContainerResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private StartContainerResponseProto() {
      servicesMetaData_ = java.util.Collections.emptyList();
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new StartContainerResponseProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_StartContainerResponseProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_StartContainerResponseProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerResponseProto.Builder.class);
    }

    public static final int SERVICES_META_DATA_FIELD_NUMBER = 1;
    @SuppressWarnings("serial")
    private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto> servicesMetaData_;
    /**
     * <code>repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1;</code>
     */
    @java.lang.Override
    public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto> getServicesMetaDataList() {
      return servicesMetaData_;
    }
    /**
     * <code>repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1;</code>
     */
    @java.lang.Override
    public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProtoOrBuilder> 
        getServicesMetaDataOrBuilderList() {
      return servicesMetaData_;
    }
    /**
     * <code>repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1;</code>
     */
    @java.lang.Override
    public int getServicesMetaDataCount() {
      return servicesMetaData_.size();
    }
    /**
     * <code>repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto getServicesMetaData(int index) {
      return servicesMetaData_.get(index);
    }
    /**
     * <code>repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProtoOrBuilder getServicesMetaDataOrBuilder(
        int index) {
      return servicesMetaData_.get(index);
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      for (int i = 0; i < servicesMetaData_.size(); i++) {
        output.writeMessage(1, servicesMetaData_.get(i));
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      for (int i = 0; i < servicesMetaData_.size(); i++) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, servicesMetaData_.get(i));
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerResponseProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerResponseProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerResponseProto) obj;

      if (!getServicesMetaDataList()
          .equals(other.getServicesMetaDataList())) return false;
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (getServicesMetaDataCount() > 0) {
        hash = (37 * hash) + SERVICES_META_DATA_FIELD_NUMBER;
        hash = (53 * hash) + getServicesMetaDataList().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerResponseProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerResponseProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerResponseProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerResponseProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerResponseProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerResponseProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerResponseProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerResponseProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }
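
    // Usage sketch (illustrative, not part of the generated API surface): a
    // minimal round trip through the static parsers above, assuming the
    // standard protobuf runtime method toByteArray().
    //
    //   StartContainerResponseProto msg =
    //       StartContainerResponseProto.newBuilder().build();
    //   byte[] wire = msg.toByteArray();
    //   StartContainerResponseProto parsed =
    //       StartContainerResponseProto.parseFrom(wire);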

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerResponseProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.StartContainerResponseProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.StartContainerResponseProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerResponseProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_StartContainerResponseProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_StartContainerResponseProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerResponseProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerResponseProto.newBuilder()
      private Builder() {
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        if (servicesMetaDataBuilder_ == null) {
          servicesMetaData_ = java.util.Collections.emptyList();
        } else {
          servicesMetaData_ = null;
          servicesMetaDataBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_StartContainerResponseProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerResponseProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerResponseProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerResponseProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerResponseProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerResponseProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerResponseProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerResponseProto(this);
        buildPartialRepeatedFields(result);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartialRepeatedFields(org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerResponseProto result) {
        if (servicesMetaDataBuilder_ == null) {
          if (((bitField0_ & 0x00000001) != 0)) {
            servicesMetaData_ = java.util.Collections.unmodifiableList(servicesMetaData_);
            bitField0_ = (bitField0_ & ~0x00000001);
          }
          result.servicesMetaData_ = servicesMetaData_;
        } else {
          result.servicesMetaData_ = servicesMetaDataBuilder_.build();
        }
      }
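
      // Ownership note: when no nested field builder is in use, the branch
      // above hands the builder's backing list to the message after wrapping
      // it as unmodifiable and clearing the "mutable" bit, so a later builder
      // mutation re-copies the list instead of aliasing the built message.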

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerResponseProto result) {
        // No singular fields to copy: the message's only field,
        // services_meta_data, is handled in buildPartialRepeatedFields.
        int from_bitField0_ = bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerResponseProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerResponseProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerResponseProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerResponseProto.getDefaultInstance()) return this;
        if (servicesMetaDataBuilder_ == null) {
          if (!other.servicesMetaData_.isEmpty()) {
            if (servicesMetaData_.isEmpty()) {
              servicesMetaData_ = other.servicesMetaData_;
              bitField0_ = (bitField0_ & ~0x00000001);
            } else {
              ensureServicesMetaDataIsMutable();
              servicesMetaData_.addAll(other.servicesMetaData_);
            }
            onChanged();
          }
        } else {
          if (!other.servicesMetaData_.isEmpty()) {
            if (servicesMetaDataBuilder_.isEmpty()) {
              servicesMetaDataBuilder_.dispose();
              servicesMetaDataBuilder_ = null;
              servicesMetaData_ = other.servicesMetaData_;
              bitField0_ = (bitField0_ & ~0x00000001);
              servicesMetaDataBuilder_ = 
                org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                   getServicesMetaDataFieldBuilder() : null;
            } else {
              servicesMetaDataBuilder_.addAllMessages(other.servicesMetaData_);
            }
          }
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto m =
                    input.readMessage(
                        org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto.PARSER,
                        extensionRegistry);
                if (servicesMetaDataBuilder_ == null) {
                  ensureServicesMetaDataIsMutable();
                  servicesMetaData_.add(m);
                } else {
                  servicesMetaDataBuilder_.addMessage(m);
                }
                break;
              } // case 10
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
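
      // Wire-format note: a tag is (field_number << 3) | wire_type, so case 10
      // above is field 1 (services_meta_data) with wire type 2
      // (length-delimited), and tag 0 marks the end of the input.
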
      private int bitField0_;

      private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto> servicesMetaData_ =
        java.util.Collections.emptyList();
      private void ensureServicesMetaDataIsMutable() {
        if (!((bitField0_ & 0x00000001) != 0)) {
          servicesMetaData_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto>(servicesMetaData_);
          bitField0_ |= 0x00000001;
        }
      }

      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto, org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProtoOrBuilder> servicesMetaDataBuilder_;

      /**
       * <code>repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto> getServicesMetaDataList() {
        if (servicesMetaDataBuilder_ == null) {
          return java.util.Collections.unmodifiableList(servicesMetaData_);
        } else {
          return servicesMetaDataBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1;</code>
       */
      public int getServicesMetaDataCount() {
        if (servicesMetaDataBuilder_ == null) {
          return servicesMetaData_.size();
        } else {
          return servicesMetaDataBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto getServicesMetaData(int index) {
        if (servicesMetaDataBuilder_ == null) {
          return servicesMetaData_.get(index);
        } else {
          return servicesMetaDataBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1;</code>
       */
      public Builder setServicesMetaData(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto value) {
        if (servicesMetaDataBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureServicesMetaDataIsMutable();
          servicesMetaData_.set(index, value);
          onChanged();
        } else {
          servicesMetaDataBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1;</code>
       */
      public Builder setServicesMetaData(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto.Builder builderForValue) {
        if (servicesMetaDataBuilder_ == null) {
          ensureServicesMetaDataIsMutable();
          servicesMetaData_.set(index, builderForValue.build());
          onChanged();
        } else {
          servicesMetaDataBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1;</code>
       */
      public Builder addServicesMetaData(org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto value) {
        if (servicesMetaDataBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureServicesMetaDataIsMutable();
          servicesMetaData_.add(value);
          onChanged();
        } else {
          servicesMetaDataBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1;</code>
       */
      public Builder addServicesMetaData(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto value) {
        if (servicesMetaDataBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureServicesMetaDataIsMutable();
          servicesMetaData_.add(index, value);
          onChanged();
        } else {
          servicesMetaDataBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1;</code>
       */
      public Builder addServicesMetaData(
          org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto.Builder builderForValue) {
        if (servicesMetaDataBuilder_ == null) {
          ensureServicesMetaDataIsMutable();
          servicesMetaData_.add(builderForValue.build());
          onChanged();
        } else {
          servicesMetaDataBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1;</code>
       */
      public Builder addServicesMetaData(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto.Builder builderForValue) {
        if (servicesMetaDataBuilder_ == null) {
          ensureServicesMetaDataIsMutable();
          servicesMetaData_.add(index, builderForValue.build());
          onChanged();
        } else {
          servicesMetaDataBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1;</code>
       */
      public Builder addAllServicesMetaData(
          java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto> values) {
        if (servicesMetaDataBuilder_ == null) {
          ensureServicesMetaDataIsMutable();
          org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
              values, servicesMetaData_);
          onChanged();
        } else {
          servicesMetaDataBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1;</code>
       */
      public Builder clearServicesMetaData() {
        if (servicesMetaDataBuilder_ == null) {
          servicesMetaData_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000001);
          onChanged();
        } else {
          servicesMetaDataBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1;</code>
       */
      public Builder removeServicesMetaData(int index) {
        if (servicesMetaDataBuilder_ == null) {
          ensureServicesMetaDataIsMutable();
          servicesMetaData_.remove(index);
          onChanged();
        } else {
          servicesMetaDataBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto.Builder getServicesMetaDataBuilder(
          int index) {
        return getServicesMetaDataFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProtoOrBuilder getServicesMetaDataOrBuilder(
          int index) {
        if (servicesMetaDataBuilder_ == null) {
          return servicesMetaData_.get(index);
        } else {
          return servicesMetaDataBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1;</code>
       */
      public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProtoOrBuilder> 
           getServicesMetaDataOrBuilderList() {
        if (servicesMetaDataBuilder_ != null) {
          return servicesMetaDataBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(servicesMetaData_);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto.Builder addServicesMetaDataBuilder() {
        return getServicesMetaDataFieldBuilder().addBuilder(
            org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto.Builder addServicesMetaDataBuilder(
          int index) {
        return getServicesMetaDataFieldBuilder().addBuilder(
            index, org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto.Builder> 
           getServicesMetaDataBuilderList() {
        return getServicesMetaDataFieldBuilder().getBuilderList();
      }
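
      // Usage sketch (illustrative, not part of the generated API surface):
      // populating the repeated field through this builder; every call below
      // is declared in this class.
      //
      //   StartContainerResponseProto.Builder b =
      //       StartContainerResponseProto.newBuilder();
      //   b.addServicesMetaDataBuilder();      // append a default entry
      //   b.getServicesMetaDataBuilder(0);     // then edit it in place
      //   StartContainerResponseProto msg = b.build();
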
      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto, org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProtoOrBuilder> 
          getServicesMetaDataFieldBuilder() {
        if (servicesMetaDataBuilder_ == null) {
          servicesMetaDataBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto, org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProtoOrBuilder>(
                  servicesMetaData_,
                  ((bitField0_ & 0x00000001) != 0),
                  getParentForChildren(),
                  isClean());
          servicesMetaData_ = null;
        }
        return servicesMetaDataBuilder_;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.StartContainerResponseProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.StartContainerResponseProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerResponseProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerResponseProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerResponseProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<StartContainerResponseProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<StartContainerResponseProto>() {
      @java.lang.Override
      public StartContainerResponseProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<StartContainerResponseProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<StartContainerResponseProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerResponseProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }

  public interface StopContainerRequestProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.StopContainerRequestProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
     * @return Whether the containerId field is set.
     */
    boolean hasContainerId();
    /**
     * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
     * @return The containerId.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getContainerId();
    /**
     * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getContainerIdOrBuilder();
  }
  /**
   * Protobuf type {@code hadoop.yarn.StopContainerRequestProto}
   */
  public static final class StopContainerRequestProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.StopContainerRequestProto)
      StopContainerRequestProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use StopContainerRequestProto.newBuilder() to construct.
    private StopContainerRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private StopContainerRequestProto() {
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new StopContainerRequestProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_StopContainerRequestProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_StopContainerRequestProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerRequestProto.Builder.class);
    }

    private int bitField0_;
    public static final int CONTAINER_ID_FIELD_NUMBER = 1;
    private org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto containerId_;
    /**
     * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
     * @return Whether the containerId field is set.
     */
    @java.lang.Override
    public boolean hasContainerId() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
     * @return The containerId.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getContainerId() {
      return containerId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : containerId_;
    }
    /**
     * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getContainerIdOrBuilder() {
      return containerId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : containerId_;
    }
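
    // Presence note: container_id is an optional message field, so
    // hasContainerId() reports bit 0 of bitField0_, while getContainerId()
    // substitutes the default instance when the field was never set.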

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeMessage(1, getContainerId());
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, getContainerId());
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerRequestProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerRequestProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerRequestProto) obj;

      if (hasContainerId() != other.hasContainerId()) return false;
      if (hasContainerId()) {
        if (!getContainerId()
            .equals(other.getContainerId())) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasContainerId()) {
        hash = (37 * hash) + CONTAINER_ID_FIELD_NUMBER;
        hash = (53 * hash) + getContainerId().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerRequestProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerRequestProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerRequestProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerRequestProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerRequestProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerRequestProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerRequestProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerRequestProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }
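
    // Usage sketch (illustrative, not part of the generated API surface): the
    // delimited variants above pair with the runtime's
    // writeDelimitedTo(OutputStream), which length-prefixes each message so
    // several can share one stream.
    //
    //   msg.writeDelimitedTo(out);                            // writer side
    //   StopContainerRequestProto m =
    //       StopContainerRequestProto.parseDelimitedFrom(in); // reader side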

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerRequestProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.StopContainerRequestProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.StopContainerRequestProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerRequestProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_StopContainerRequestProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_StopContainerRequestProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerRequestProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerRequestProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
          getContainerIdFieldBuilder();
        }
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        containerId_ = null;
        if (containerIdBuilder_ != null) {
          containerIdBuilder_.dispose();
          containerIdBuilder_ = null;
        }
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_StopContainerRequestProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerRequestProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerRequestProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerRequestProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerRequestProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerRequestProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerRequestProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerRequestProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerRequestProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.containerId_ = containerIdBuilder_ == null
              ? containerId_
              : containerIdBuilder_.build();
          to_bitField0_ |= 0x00000001;
        }
        result.bitField0_ |= to_bitField0_;
      }
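
      // Note: buildPartial0 copies the builder's has-bit for container_id into
      // the message, taking the value from the nested builder when one exists
      // and from the cached containerId_ field otherwise.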

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerRequestProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerRequestProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerRequestProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerRequestProto.getDefaultInstance()) return this;
        if (other.hasContainerId()) {
          mergeContainerId(other.getContainerId());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                input.readMessage(
                    getContainerIdFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

      private org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto containerId_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> containerIdBuilder_;
      /**
       * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       * @return Whether the containerId field is set.
       */
      public boolean hasContainerId() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       * @return The containerId.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getContainerId() {
        if (containerIdBuilder_ == null) {
          return containerId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : containerId_;
        } else {
          return containerIdBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public Builder setContainerId(org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) {
        if (containerIdBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          containerId_ = value;
        } else {
          containerIdBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public Builder setContainerId(
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder builderForValue) {
        if (containerIdBuilder_ == null) {
          containerId_ = builderForValue.build();
        } else {
          containerIdBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public Builder mergeContainerId(org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) {
        if (containerIdBuilder_ == null) {
          if (((bitField0_ & 0x00000001) != 0) &&
            containerId_ != null &&
            containerId_ != org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance()) {
            getContainerIdBuilder().mergeFrom(value);
          } else {
            containerId_ = value;
          }
        } else {
          containerIdBuilder_.mergeFrom(value);
        }
        if (containerId_ != null) {
          bitField0_ |= 0x00000001;
          onChanged();
        }
        return this;
      }
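
      // Merge semantics: when a container_id is already set (and is not the
      // default instance), the incoming value is folded into it field by
      // field; otherwise the incoming message simply replaces the field.
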
      /**
       * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public Builder clearContainerId() {
        bitField0_ = (bitField0_ & ~0x00000001);
        containerId_ = null;
        if (containerIdBuilder_ != null) {
          containerIdBuilder_.dispose();
          containerIdBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder getContainerIdBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getContainerIdFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getContainerIdOrBuilder() {
        if (containerIdBuilder_ != null) {
          return containerIdBuilder_.getMessageOrBuilder();
        } else {
          return containerId_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : containerId_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> 
          getContainerIdFieldBuilder() {
        if (containerIdBuilder_ == null) {
          containerIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder>(
                  getContainerId(),
                  getParentForChildren(),
                  isClean());
          containerId_ = null;
        }
        return containerIdBuilder_;
      }
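
      // Laziness note: the SingleFieldBuilderV3 is created on first use and
      // thereafter owns the field, so the plain containerId_ cache is nulled
      // out and subsequent reads and writes go through the builder.
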
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.StopContainerRequestProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.StopContainerRequestProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerRequestProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerRequestProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerRequestProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<StopContainerRequestProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<StopContainerRequestProto>() {
      @java.lang.Override
      public StopContainerRequestProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<StopContainerRequestProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<StopContainerRequestProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerRequestProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }

  public interface StopContainerResponseProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.StopContainerResponseProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
  }
  /**
   * Protobuf type {@code hadoop.yarn.StopContainerResponseProto}
   */
  public static final class StopContainerResponseProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.StopContainerResponseProto)
      StopContainerResponseProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use StopContainerResponseProto.newBuilder() to construct.
    private StopContainerResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private StopContainerResponseProto() {
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new StopContainerResponseProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_StopContainerResponseProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_StopContainerResponseProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerResponseProto.Builder.class);
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getUnknownFields().writeTo(output);
    }
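
    // StopContainerResponseProto declares no fields; serialization reduces to
    // emitting whatever unknown fields were retained when it was parsed.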

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerResponseProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerResponseProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerResponseProto) obj;

      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerResponseProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerResponseProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerResponseProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerResponseProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerResponseProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerResponseProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerResponseProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerResponseProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
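
    // Illustrative sketch (not generated output): the delimited variants frame
    // each message with a varint length prefix, so several messages can share
    // one stream. The "in" InputStream below is hypothetical.
    //
    //   StopContainerResponseProto first = StopContainerResponseProto.parseDelimitedFrom(in);
    //   StopContainerResponseProto second = StopContainerResponseProto.parseDelimitedFrom(in);
    //   // parseDelimitedFrom returns null once the stream is exhausted.
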
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerResponseProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.StopContainerResponseProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.StopContainerResponseProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerResponseProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_StopContainerResponseProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_StopContainerResponseProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerResponseProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerResponseProto.newBuilder()
      private Builder() {
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_StopContainerResponseProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerResponseProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerResponseProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerResponseProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerResponseProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerResponseProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerResponseProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerResponseProto(this);
        onBuilt();
        return result;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerResponseProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerResponseProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerResponseProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerResponseProto.getDefaultInstance()) return this;
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
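            // This message declares no fields, so every tag except 0 (end of
            // input) is routed to the unknown-field set in the default branch.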
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.StopContainerResponseProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.StopContainerResponseProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerResponseProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerResponseProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerResponseProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated
    public static final org.apache.hadoop.thirdparty.protobuf.Parser<StopContainerResponseProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<StopContainerResponseProto>() {
      @java.lang.Override
      public StopContainerResponseProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<StopContainerResponseProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<StopContainerResponseProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerResponseProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
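
  // Illustrative sketch (not generated output): StopContainerResponseProto has
  // no declared fields, so a serialize/parse round trip only preserves unknown
  // fields.
  //
  //   StopContainerResponseProto resp = StopContainerResponseProto.newBuilder().build();
  //   byte[] bytes = resp.toByteArray();
  //   StopContainerResponseProto parsed = StopContainerResponseProto.parseFrom(bytes);
  //   // parsed.equals(resp) holds; both equal the default instance.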

  public interface ResourceLocalizationRequestProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.ResourceLocalizationRequestProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
     * @return Whether the containerId field is set.
     */
    boolean hasContainerId();
    /**
     * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
     * @return The containerId.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getContainerId();
    /**
     * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getContainerIdOrBuilder();

    /**
     * <code>repeated .hadoop.yarn.StringLocalResourceMapProto local_resources = 2;</code>
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto> 
        getLocalResourcesList();
    /**
     * <code>repeated .hadoop.yarn.StringLocalResourceMapProto local_resources = 2;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto getLocalResources(int index);
    /**
     * <code>repeated .hadoop.yarn.StringLocalResourceMapProto local_resources = 2;</code>
     */
    int getLocalResourcesCount();
    /**
     * <code>repeated .hadoop.yarn.StringLocalResourceMapProto local_resources = 2;</code>
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProtoOrBuilder> 
        getLocalResourcesOrBuilderList();
    /**
     * <code>repeated .hadoop.yarn.StringLocalResourceMapProto local_resources = 2;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProtoOrBuilder getLocalResourcesOrBuilder(
        int index);
  }
  /**
   * Protobuf type {@code hadoop.yarn.ResourceLocalizationRequestProto}
   */
  public static final class ResourceLocalizationRequestProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.ResourceLocalizationRequestProto)
      ResourceLocalizationRequestProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use ResourceLocalizationRequestProto.newBuilder() to construct.
    private ResourceLocalizationRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private ResourceLocalizationRequestProto() {
      localResources_ = java.util.Collections.emptyList();
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new ResourceLocalizationRequestProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ResourceLocalizationRequestProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ResourceLocalizationRequestProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationRequestProto.Builder.class);
    }

    private int bitField0_;
    public static final int CONTAINER_ID_FIELD_NUMBER = 1;
    private org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto containerId_;
    /**
     * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
     * @return Whether the containerId field is set.
     */
    @java.lang.Override
    public boolean hasContainerId() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
     * @return The containerId.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getContainerId() {
      return containerId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : containerId_;
    }
    /**
     * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getContainerIdOrBuilder() {
      return containerId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : containerId_;
    }

    public static final int LOCAL_RESOURCES_FIELD_NUMBER = 2;
    @SuppressWarnings("serial")
    private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto> localResources_;
    /**
     * <code>repeated .hadoop.yarn.StringLocalResourceMapProto local_resources = 2;</code>
     */
    @java.lang.Override
    public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto> getLocalResourcesList() {
      return localResources_;
    }
    /**
     * <code>repeated .hadoop.yarn.StringLocalResourceMapProto local_resources = 2;</code>
     */
    @java.lang.Override
    public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProtoOrBuilder> 
        getLocalResourcesOrBuilderList() {
      return localResources_;
    }
    /**
     * <code>repeated .hadoop.yarn.StringLocalResourceMapProto local_resources = 2;</code>
     */
    @java.lang.Override
    public int getLocalResourcesCount() {
      return localResources_.size();
    }
    /**
     * <code>repeated .hadoop.yarn.StringLocalResourceMapProto local_resources = 2;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto getLocalResources(int index) {
      return localResources_.get(index);
    }
    /**
     * <code>repeated .hadoop.yarn.StringLocalResourceMapProto local_resources = 2;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProtoOrBuilder getLocalResourcesOrBuilder(
        int index) {
      return localResources_.get(index);
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeMessage(1, getContainerId());
      }
      for (int i = 0; i < localResources_.size(); i++) {
        output.writeMessage(2, localResources_.get(i));
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, getContainerId());
      }
      for (int i = 0; i < localResources_.size(); i++) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(2, localResources_.get(i));
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationRequestProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationRequestProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationRequestProto) obj;

      if (hasContainerId() != other.hasContainerId()) return false;
      if (hasContainerId()) {
        if (!getContainerId()
            .equals(other.getContainerId())) return false;
      }
      if (!getLocalResourcesList()
          .equals(other.getLocalResourcesList())) return false;
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasContainerId()) {
        hash = (37 * hash) + CONTAINER_ID_FIELD_NUMBER;
        hash = (53 * hash) + getContainerId().hashCode();
      }
      if (getLocalResourcesCount() > 0) {
        hash = (37 * hash) + LOCAL_RESOURCES_FIELD_NUMBER;
        hash = (53 * hash) + getLocalResourcesList().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationRequestProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationRequestProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationRequestProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationRequestProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationRequestProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationRequestProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationRequestProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationRequestProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationRequestProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.ResourceLocalizationRequestProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.ResourceLocalizationRequestProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationRequestProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ResourceLocalizationRequestProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ResourceLocalizationRequestProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationRequestProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationRequestProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
          getContainerIdFieldBuilder();
          getLocalResourcesFieldBuilder();
        }
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        containerId_ = null;
        if (containerIdBuilder_ != null) {
          containerIdBuilder_.dispose();
          containerIdBuilder_ = null;
        }
        if (localResourcesBuilder_ == null) {
          localResources_ = java.util.Collections.emptyList();
        } else {
          localResources_ = null;
          localResourcesBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000002);
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ResourceLocalizationRequestProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationRequestProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationRequestProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationRequestProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationRequestProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationRequestProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationRequestProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationRequestProto(this);
        buildPartialRepeatedFields(result);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartialRepeatedFields(org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationRequestProto result) {
        if (localResourcesBuilder_ == null) {
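          // Plain-list mode: if this builder made a mutable copy, freeze it so
          // the built message can share the list without further copying.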
          if (((bitField0_ & 0x00000002) != 0)) {
            localResources_ = java.util.Collections.unmodifiableList(localResources_);
            bitField0_ = (bitField0_ & ~0x00000002);
          }
          result.localResources_ = localResources_;
        } else {
          result.localResources_ = localResourcesBuilder_.build();
        }
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationRequestProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.containerId_ = containerIdBuilder_ == null
              ? containerId_
              : containerIdBuilder_.build();
          to_bitField0_ |= 0x00000001;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationRequestProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationRequestProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationRequestProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationRequestProto.getDefaultInstance()) return this;
        if (other.hasContainerId()) {
          mergeContainerId(other.getContainerId());
        }
        if (localResourcesBuilder_ == null) {
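          // Plain-list mode: adopt the other message's immutable list when ours
          // is empty; otherwise make a mutable copy and append.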
          if (!other.localResources_.isEmpty()) {
            if (localResources_.isEmpty()) {
              localResources_ = other.localResources_;
              bitField0_ = (bitField0_ & ~0x00000002);
            } else {
              ensureLocalResourcesIsMutable();
              localResources_.addAll(other.localResources_);
            }
            onChanged();
          }
        } else {
          if (!other.localResources_.isEmpty()) {
            if (localResourcesBuilder_.isEmpty()) {
              localResourcesBuilder_.dispose();
              localResourcesBuilder_ = null;
              localResources_ = other.localResources_;
              bitField0_ = (bitField0_ & ~0x00000002);
              localResourcesBuilder_ = 
                org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                   getLocalResourcesFieldBuilder() : null;
            } else {
              localResourcesBuilder_.addAllMessages(other.localResources_);
            }
          }
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
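              // tag 10 = field 1 (container_id), wire type 2 (length-delimited)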
              case 10: {
                input.readMessage(
                    getContainerIdFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
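              // tag 18 = field 2 (local_resources), wire type 2 (length-delimited)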
              case 18: {
                org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto m =
                    input.readMessage(
                        org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto.PARSER,
                        extensionRegistry);
                if (localResourcesBuilder_ == null) {
                  ensureLocalResourcesIsMutable();
                  localResources_.add(m);
                } else {
                  localResourcesBuilder_.addMessage(m);
                }
                break;
              } // case 18
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

      private org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto containerId_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> containerIdBuilder_;
      /**
       * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       * @return Whether the containerId field is set.
       */
      public boolean hasContainerId() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       * @return The containerId.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getContainerId() {
        if (containerIdBuilder_ == null) {
          return containerId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : containerId_;
        } else {
          return containerIdBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public Builder setContainerId(org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) {
        if (containerIdBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          containerId_ = value;
        } else {
          containerIdBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public Builder setContainerId(
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder builderForValue) {
        if (containerIdBuilder_ == null) {
          containerId_ = builderForValue.build();
        } else {
          containerIdBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public Builder mergeContainerId(org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) {
        if (containerIdBuilder_ == null) {
          if (((bitField0_ & 0x00000001) != 0) &&
            containerId_ != null &&
            containerId_ != org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance()) {
            getContainerIdBuilder().mergeFrom(value);
          } else {
            containerId_ = value;
          }
        } else {
          containerIdBuilder_.mergeFrom(value);
        }
        if (containerId_ != null) {
          bitField0_ |= 0x00000001;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public Builder clearContainerId() {
        bitField0_ = (bitField0_ & ~0x00000001);
        containerId_ = null;
        if (containerIdBuilder_ != null) {
          containerIdBuilder_.dispose();
          containerIdBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder getContainerIdBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getContainerIdFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getContainerIdOrBuilder() {
        if (containerIdBuilder_ != null) {
          return containerIdBuilder_.getMessageOrBuilder();
        } else {
          return containerId_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : containerId_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> 
          getContainerIdFieldBuilder() {
        if (containerIdBuilder_ == null) {
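          // Lazily create the single-field builder; once it exists it owns the
          // current value, so the plain containerId_ reference is cleared below.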
          containerIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder>(
                  getContainerId(),
                  getParentForChildren(),
                  isClean());
          containerId_ = null;
        }
        return containerIdBuilder_;
      }

      private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto> localResources_ =
        java.util.Collections.emptyList();
      private void ensureLocalResourcesIsMutable() {
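        // Bit 0x00000002 records whether localResources_ is already a private
        // mutable copy; the shared list is copied on first write.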
        if (!((bitField0_ & 0x00000002) != 0)) {
          localResources_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto>(localResources_);
          bitField0_ |= 0x00000002;
        }
      }

      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto, org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProtoOrBuilder> localResourcesBuilder_;

      /**
       * <code>repeated .hadoop.yarn.StringLocalResourceMapProto local_resources = 2;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto> getLocalResourcesList() {
        if (localResourcesBuilder_ == null) {
          return java.util.Collections.unmodifiableList(localResources_);
        } else {
          return localResourcesBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.StringLocalResourceMapProto local_resources = 2;</code>
       */
      public int getLocalResourcesCount() {
        if (localResourcesBuilder_ == null) {
          return localResources_.size();
        } else {
          return localResourcesBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.StringLocalResourceMapProto local_resources = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto getLocalResources(int index) {
        if (localResourcesBuilder_ == null) {
          return localResources_.get(index);
        } else {
          return localResourcesBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.StringLocalResourceMapProto local_resources = 2;</code>
       */
      public Builder setLocalResources(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto value) {
        if (localResourcesBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureLocalResourcesIsMutable();
          localResources_.set(index, value);
          onChanged();
        } else {
          localResourcesBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringLocalResourceMapProto local_resources = 2;</code>
       */
      public Builder setLocalResources(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto.Builder builderForValue) {
        if (localResourcesBuilder_ == null) {
          ensureLocalResourcesIsMutable();
          localResources_.set(index, builderForValue.build());
          onChanged();
        } else {
          localResourcesBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringLocalResourceMapProto local_resources = 2;</code>
       */
      public Builder addLocalResources(org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto value) {
        if (localResourcesBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureLocalResourcesIsMutable();
          localResources_.add(value);
          onChanged();
        } else {
          localResourcesBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringLocalResourceMapProto local_resources = 2;</code>
       */
      public Builder addLocalResources(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto value) {
        if (localResourcesBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureLocalResourcesIsMutable();
          localResources_.add(index, value);
          onChanged();
        } else {
          localResourcesBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringLocalResourceMapProto local_resources = 2;</code>
       */
      public Builder addLocalResources(
          org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto.Builder builderForValue) {
        if (localResourcesBuilder_ == null) {
          ensureLocalResourcesIsMutable();
          localResources_.add(builderForValue.build());
          onChanged();
        } else {
          localResourcesBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringLocalResourceMapProto local_resources = 2;</code>
       */
      public Builder addLocalResources(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto.Builder builderForValue) {
        if (localResourcesBuilder_ == null) {
          ensureLocalResourcesIsMutable();
          localResources_.add(index, builderForValue.build());
          onChanged();
        } else {
          localResourcesBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringLocalResourceMapProto local_resources = 2;</code>
       */
      public Builder addAllLocalResources(
          java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto> values) {
        if (localResourcesBuilder_ == null) {
          ensureLocalResourcesIsMutable();
          org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
              values, localResources_);
          onChanged();
        } else {
          localResourcesBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringLocalResourceMapProto local_resources = 2;</code>
       */
      public Builder clearLocalResources() {
        if (localResourcesBuilder_ == null) {
          localResources_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000002);
          onChanged();
        } else {
          localResourcesBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringLocalResourceMapProto local_resources = 2;</code>
       */
      public Builder removeLocalResources(int index) {
        if (localResourcesBuilder_ == null) {
          ensureLocalResourcesIsMutable();
          localResources_.remove(index);
          onChanged();
        } else {
          localResourcesBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringLocalResourceMapProto local_resources = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto.Builder getLocalResourcesBuilder(
          int index) {
        return getLocalResourcesFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .hadoop.yarn.StringLocalResourceMapProto local_resources = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProtoOrBuilder getLocalResourcesOrBuilder(
          int index) {
        if (localResourcesBuilder_ == null) {
          return localResources_.get(index);
        } else {
          return localResourcesBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.StringLocalResourceMapProto local_resources = 2;</code>
       */
      public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProtoOrBuilder> 
           getLocalResourcesOrBuilderList() {
        if (localResourcesBuilder_ != null) {
          return localResourcesBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(localResources_);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.StringLocalResourceMapProto local_resources = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto.Builder addLocalResourcesBuilder() {
        return getLocalResourcesFieldBuilder().addBuilder(
            org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.StringLocalResourceMapProto local_resources = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto.Builder addLocalResourcesBuilder(
          int index) {
        return getLocalResourcesFieldBuilder().addBuilder(
            index, org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.StringLocalResourceMapProto local_resources = 2;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto.Builder> 
           getLocalResourcesBuilderList() {
        return getLocalResourcesFieldBuilder().getBuilderList();
      }
      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto, org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProtoOrBuilder> 
          getLocalResourcesFieldBuilder() {
        if (localResourcesBuilder_ == null) {
          localResourcesBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto, org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProtoOrBuilder>(
                  localResources_,
                  ((bitField0_ & 0x00000002) != 0),
                  getParentForChildren(),
                  isClean());
          localResources_ = null;
        }
        return localResourcesBuilder_;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.ResourceLocalizationRequestProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.ResourceLocalizationRequestProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationRequestProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationRequestProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationRequestProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated
    public static final org.apache.hadoop.thirdparty.protobuf.Parser<ResourceLocalizationRequestProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<ResourceLocalizationRequestProto>() {
      @java.lang.Override
      public ResourceLocalizationRequestProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<ResourceLocalizationRequestProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<ResourceLocalizationRequestProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationRequestProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
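
  // Illustrative sketch (not generated output): typical builder usage. The
  // "containerId" and "resourceEntry" values are hypothetical, previously
  // built ContainerIdProto and StringLocalResourceMapProto messages.
  //
  //   ResourceLocalizationRequestProto req = ResourceLocalizationRequestProto.newBuilder()
  //       .setContainerId(containerId)
  //       .addLocalResources(resourceEntry)
  //       .build();
  //   assert req.hasContainerId();
  //   int count = req.getLocalResourcesCount();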

  public interface ResourceLocalizationResponseProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.ResourceLocalizationResponseProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
  }
  /**
   * Protobuf type {@code hadoop.yarn.ResourceLocalizationResponseProto}
   */
  public static final class ResourceLocalizationResponseProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.ResourceLocalizationResponseProto)
      ResourceLocalizationResponseProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use ResourceLocalizationResponseProto.newBuilder() to construct.
    private ResourceLocalizationResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private ResourceLocalizationResponseProto() {
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new ResourceLocalizationResponseProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ResourceLocalizationResponseProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ResourceLocalizationResponseProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationResponseProto.Builder.class);
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationResponseProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationResponseProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationResponseProto) obj;

      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationResponseProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationResponseProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationResponseProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationResponseProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationResponseProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationResponseProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationResponseProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationResponseProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationResponseProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.ResourceLocalizationResponseProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.ResourceLocalizationResponseProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationResponseProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ResourceLocalizationResponseProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ResourceLocalizationResponseProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationResponseProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationResponseProto.newBuilder()
      private Builder() {
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ResourceLocalizationResponseProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationResponseProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationResponseProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationResponseProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationResponseProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationResponseProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationResponseProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationResponseProto(this);
        onBuilt();
        return result;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationResponseProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationResponseProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationResponseProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationResponseProto.getDefaultInstance()) return this;
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.ResourceLocalizationResponseProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.ResourceLocalizationResponseProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationResponseProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationResponseProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationResponseProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<ResourceLocalizationResponseProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<ResourceLocalizationResponseProto>() {
      @java.lang.Override
      public ResourceLocalizationResponseProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<ResourceLocalizationResponseProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<ResourceLocalizationResponseProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationResponseProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
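
  // Illustrative sketch (not part of the protoc output): ResourceLocalizationResponseProto
  // declares no fields, so a default instance serializes to an empty payload and parses
  // back unchanged. A minimal, hypothetical round trip using the methods generated above,
  // kept inside a comment so this generated source still compiles:
  //
  //   byte[] bytes = YarnServiceProtos.ResourceLocalizationResponseProto
  //       .getDefaultInstance().toByteArray();   // zero bytes on the wire
  //   YarnServiceProtos.ResourceLocalizationResponseProto parsed =
  //       YarnServiceProtos.ResourceLocalizationResponseProto.parseFrom(bytes);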

  public interface ReInitializeContainerRequestProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.ReInitializeContainerRequestProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
     * @return Whether the containerId field is set.
     */
    boolean hasContainerId();
    /**
     * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
     * @return The containerId.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getContainerId();
    /**
     * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getContainerIdOrBuilder();

    /**
     * <code>optional .hadoop.yarn.ContainerLaunchContextProto container_launch_context = 2;</code>
     * @return Whether the containerLaunchContext field is set.
     */
    boolean hasContainerLaunchContext();
    /**
     * <code>optional .hadoop.yarn.ContainerLaunchContextProto container_launch_context = 2;</code>
     * @return The containerLaunchContext.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto getContainerLaunchContext();
    /**
     * <code>optional .hadoop.yarn.ContainerLaunchContextProto container_launch_context = 2;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProtoOrBuilder getContainerLaunchContextOrBuilder();

    /**
     * <code>optional bool auto_commit = 3 [default = true];</code>
     * @return Whether the autoCommit field is set.
     */
    boolean hasAutoCommit();
    /**
     * <code>optional bool auto_commit = 3 [default = true];</code>
     * @return The autoCommit.
     */
    boolean getAutoCommit();
  }
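
  // Illustrative sketch (not part of the protoc output): building a
  // ReInitializeContainerRequestProto through the builder API declared above.
  // Default instances stand in for real ContainerIdProto / ContainerLaunchContextProto
  // values; the sketch is kept inside a comment so this generated source still compiles.
  //
  //   YarnServiceProtos.ReInitializeContainerRequestProto request =
  //       YarnServiceProtos.ReInitializeContainerRequestProto.newBuilder()
  //           .setContainerId(
  //               org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto
  //                   .getDefaultInstance())
  //           .setContainerLaunchContext(
  //               org.apache.hadoop.yarn.proto.YarnProtos
  //                   .ContainerLaunchContextProto.getDefaultInstance())
  //           .setAutoCommit(false)  // auto_commit otherwise defaults to true
  //           .build();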
  /**
   * Protobuf type {@code hadoop.yarn.ReInitializeContainerRequestProto}
   */
  public static final class ReInitializeContainerRequestProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.ReInitializeContainerRequestProto)
      ReInitializeContainerRequestProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use ReInitializeContainerRequestProto.newBuilder() to construct.
    private ReInitializeContainerRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private ReInitializeContainerRequestProto() {
      autoCommit_ = true;
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new ReInitializeContainerRequestProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReInitializeContainerRequestProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReInitializeContainerRequestProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerRequestProto.Builder.class);
    }

    private int bitField0_;
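    // bitField0_ tracks explicit presence of this message's optional fields, in
    // declaration order: bit 0 = container_id, bit 1 = container_launch_context,
    // bit 2 = auto_commit.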
    public static final int CONTAINER_ID_FIELD_NUMBER = 1;
    private org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto containerId_;
    /**
     * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
     * @return Whether the containerId field is set.
     */
    @java.lang.Override
    public boolean hasContainerId() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
     * @return The containerId.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getContainerId() {
      return containerId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : containerId_;
    }
    /**
     * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getContainerIdOrBuilder() {
      return containerId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : containerId_;
    }

    public static final int CONTAINER_LAUNCH_CONTEXT_FIELD_NUMBER = 2;
    private org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto containerLaunchContext_;
    /**
     * <code>optional .hadoop.yarn.ContainerLaunchContextProto container_launch_context = 2;</code>
     * @return Whether the containerLaunchContext field is set.
     */
    @java.lang.Override
    public boolean hasContainerLaunchContext() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.ContainerLaunchContextProto container_launch_context = 2;</code>
     * @return The containerLaunchContext.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto getContainerLaunchContext() {
      return containerLaunchContext_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto.getDefaultInstance() : containerLaunchContext_;
    }
    /**
     * <code>optional .hadoop.yarn.ContainerLaunchContextProto container_launch_context = 2;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProtoOrBuilder getContainerLaunchContextOrBuilder() {
      return containerLaunchContext_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto.getDefaultInstance() : containerLaunchContext_;
    }

    public static final int AUTO_COMMIT_FIELD_NUMBER = 3;
    private boolean autoCommit_ = true;
    /**
     * <code>optional bool auto_commit = 3 [default = true];</code>
     * @return Whether the autoCommit field is set.
     */
    @java.lang.Override
    public boolean hasAutoCommit() {
      return ((bitField0_ & 0x00000004) != 0);
    }
    /**
     * <code>optional bool auto_commit = 3 [default = true];</code>
     * @return The autoCommit.
     */
    @java.lang.Override
    public boolean getAutoCommit() {
      return autoCommit_;
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeMessage(1, getContainerId());
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        output.writeMessage(2, getContainerLaunchContext());
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        output.writeBool(3, autoCommit_);
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, getContainerId());
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(2, getContainerLaunchContext());
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeBoolSize(3, autoCommit_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerRequestProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerRequestProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerRequestProto) obj;

      if (hasContainerId() != other.hasContainerId()) return false;
      if (hasContainerId()) {
        if (!getContainerId()
            .equals(other.getContainerId())) return false;
      }
      if (hasContainerLaunchContext() != other.hasContainerLaunchContext()) return false;
      if (hasContainerLaunchContext()) {
        if (!getContainerLaunchContext()
            .equals(other.getContainerLaunchContext())) return false;
      }
      if (hasAutoCommit() != other.hasAutoCommit()) return false;
      if (hasAutoCommit()) {
        if (getAutoCommit()
            != other.getAutoCommit()) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasContainerId()) {
        hash = (37 * hash) + CONTAINER_ID_FIELD_NUMBER;
        hash = (53 * hash) + getContainerId().hashCode();
      }
      if (hasContainerLaunchContext()) {
        hash = (37 * hash) + CONTAINER_LAUNCH_CONTEXT_FIELD_NUMBER;
        hash = (53 * hash) + getContainerLaunchContext().hashCode();
      }
      if (hasAutoCommit()) {
        hash = (37 * hash) + AUTO_COMMIT_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashBoolean(
            getAutoCommit());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerRequestProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerRequestProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerRequestProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerRequestProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerRequestProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerRequestProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerRequestProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerRequestProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerRequestProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.ReInitializeContainerRequestProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.ReInitializeContainerRequestProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerRequestProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReInitializeContainerRequestProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReInitializeContainerRequestProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerRequestProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerRequestProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
          getContainerIdFieldBuilder();
          getContainerLaunchContextFieldBuilder();
        }
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        containerId_ = null;
        if (containerIdBuilder_ != null) {
          containerIdBuilder_.dispose();
          containerIdBuilder_ = null;
        }
        containerLaunchContext_ = null;
        if (containerLaunchContextBuilder_ != null) {
          containerLaunchContextBuilder_.dispose();
          containerLaunchContextBuilder_ = null;
        }
        autoCommit_ = true;
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReInitializeContainerRequestProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerRequestProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerRequestProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerRequestProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerRequestProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerRequestProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerRequestProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerRequestProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerRequestProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.containerId_ = containerIdBuilder_ == null
              ? containerId_
              : containerIdBuilder_.build();
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.containerLaunchContext_ = containerLaunchContextBuilder_ == null
              ? containerLaunchContext_
              : containerLaunchContextBuilder_.build();
          to_bitField0_ |= 0x00000002;
        }
        if (((from_bitField0_ & 0x00000004) != 0)) {
          result.autoCommit_ = autoCommit_;
          to_bitField0_ |= 0x00000004;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerRequestProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerRequestProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerRequestProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerRequestProto.getDefaultInstance()) return this;
        if (other.hasContainerId()) {
          mergeContainerId(other.getContainerId());
        }
        if (other.hasContainerLaunchContext()) {
          mergeContainerLaunchContext(other.getContainerLaunchContext());
        }
        if (other.hasAutoCommit()) {
          setAutoCommit(other.getAutoCommit());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                input.readMessage(
                    getContainerIdFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
              case 18: {
                input.readMessage(
                    getContainerLaunchContextFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
              case 24: {
                autoCommit_ = input.readBool();
                bitField0_ |= 0x00000004;
                break;
              } // case 24
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

      private org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto containerId_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> containerIdBuilder_;
      /**
       * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       * @return Whether the containerId field is set.
       */
      public boolean hasContainerId() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       * @return The containerId.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getContainerId() {
        if (containerIdBuilder_ == null) {
          return containerId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : containerId_;
        } else {
          return containerIdBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public Builder setContainerId(org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) {
        if (containerIdBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          containerId_ = value;
        } else {
          containerIdBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public Builder setContainerId(
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder builderForValue) {
        if (containerIdBuilder_ == null) {
          containerId_ = builderForValue.build();
        } else {
          containerIdBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public Builder mergeContainerId(org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) {
        if (containerIdBuilder_ == null) {
          if (((bitField0_ & 0x00000001) != 0) &&
            containerId_ != null &&
            containerId_ != org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance()) {
            getContainerIdBuilder().mergeFrom(value);
          } else {
            containerId_ = value;
          }
        } else {
          containerIdBuilder_.mergeFrom(value);
        }
        if (containerId_ != null) {
          bitField0_ |= 0x00000001;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public Builder clearContainerId() {
        bitField0_ = (bitField0_ & ~0x00000001);
        containerId_ = null;
        if (containerIdBuilder_ != null) {
          containerIdBuilder_.dispose();
          containerIdBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder getContainerIdBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getContainerIdFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getContainerIdOrBuilder() {
        if (containerIdBuilder_ != null) {
          return containerIdBuilder_.getMessageOrBuilder();
        } else {
          return containerId_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : containerId_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> 
          getContainerIdFieldBuilder() {
        if (containerIdBuilder_ == null) {
          containerIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder>(
                  getContainerId(),
                  getParentForChildren(),
                  isClean());
          containerId_ = null;
        }
        return containerIdBuilder_;
      }

      private org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto containerLaunchContext_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProtoOrBuilder> containerLaunchContextBuilder_;
      /**
       * <code>optional .hadoop.yarn.ContainerLaunchContextProto container_launch_context = 2;</code>
       * @return Whether the containerLaunchContext field is set.
       */
      public boolean hasContainerLaunchContext() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.ContainerLaunchContextProto container_launch_context = 2;</code>
       * @return The containerLaunchContext.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto getContainerLaunchContext() {
        if (containerLaunchContextBuilder_ == null) {
          return containerLaunchContext_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto.getDefaultInstance() : containerLaunchContext_;
        } else {
          return containerLaunchContextBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.ContainerLaunchContextProto container_launch_context = 2;</code>
       */
      public Builder setContainerLaunchContext(org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto value) {
        if (containerLaunchContextBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          containerLaunchContext_ = value;
        } else {
          containerLaunchContextBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ContainerLaunchContextProto container_launch_context = 2;</code>
       */
      public Builder setContainerLaunchContext(
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto.Builder builderForValue) {
        if (containerLaunchContextBuilder_ == null) {
          containerLaunchContext_ = builderForValue.build();
        } else {
          containerLaunchContextBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ContainerLaunchContextProto container_launch_context = 2;</code>
       */
      public Builder mergeContainerLaunchContext(org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto value) {
        if (containerLaunchContextBuilder_ == null) {
          if (((bitField0_ & 0x00000002) != 0) &&
            containerLaunchContext_ != null &&
            containerLaunchContext_ != org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto.getDefaultInstance()) {
            getContainerLaunchContextBuilder().mergeFrom(value);
          } else {
            containerLaunchContext_ = value;
          }
        } else {
          containerLaunchContextBuilder_.mergeFrom(value);
        }
        if (containerLaunchContext_ != null) {
          bitField0_ |= 0x00000002;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ContainerLaunchContextProto container_launch_context = 2;</code>
       */
      public Builder clearContainerLaunchContext() {
        bitField0_ = (bitField0_ & ~0x00000002);
        containerLaunchContext_ = null;
        if (containerLaunchContextBuilder_ != null) {
          containerLaunchContextBuilder_.dispose();
          containerLaunchContextBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ContainerLaunchContextProto container_launch_context = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto.Builder getContainerLaunchContextBuilder() {
        bitField0_ |= 0x00000002;
        onChanged();
        return getContainerLaunchContextFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.ContainerLaunchContextProto container_launch_context = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProtoOrBuilder getContainerLaunchContextOrBuilder() {
        if (containerLaunchContextBuilder_ != null) {
          return containerLaunchContextBuilder_.getMessageOrBuilder();
        } else {
          return containerLaunchContext_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto.getDefaultInstance() : containerLaunchContext_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.ContainerLaunchContextProto container_launch_context = 2;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProtoOrBuilder> 
          getContainerLaunchContextFieldBuilder() {
        if (containerLaunchContextBuilder_ == null) {
          containerLaunchContextBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProtoOrBuilder>(
                  getContainerLaunchContext(),
                  getParentForChildren(),
                  isClean());
          containerLaunchContext_ = null;
        }
        return containerLaunchContextBuilder_;
      }

      private boolean autoCommit_ = true;
      /**
       * <code>optional bool auto_commit = 3 [default = true];</code>
       * @return Whether the autoCommit field is set.
       */
      @java.lang.Override
      public boolean hasAutoCommit() {
        return ((bitField0_ & 0x00000004) != 0);
      }
      /**
       * <code>optional bool auto_commit = 3 [default = true];</code>
       * @return The autoCommit.
       */
      @java.lang.Override
      public boolean getAutoCommit() {
        return autoCommit_;
      }
      /**
       * <code>optional bool auto_commit = 3 [default = true];</code>
       * @param value The autoCommit to set.
       * @return This builder for chaining.
       */
      public Builder setAutoCommit(boolean value) {
        autoCommit_ = value;
        bitField0_ |= 0x00000004;
        onChanged();
        return this;
      }
      /**
       * <code>optional bool auto_commit = 3 [default = true];</code>
       * @return This builder for chaining.
       */
      public Builder clearAutoCommit() {
        bitField0_ = (bitField0_ & ~0x00000004);
        autoCommit_ = true;
        onChanged();
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.ReInitializeContainerRequestProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.ReInitializeContainerRequestProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerRequestProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerRequestProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerRequestProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<ReInitializeContainerRequestProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<ReInitializeContainerRequestProto>() {
      @java.lang.Override
      public ReInitializeContainerRequestProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<ReInitializeContainerRequestProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<ReInitializeContainerRequestProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerRequestProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
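
  // Illustrative sketch (not part of the protoc output): a byte[] round trip using
  // the parseFrom overload generated above. Note the presence bit: when auto_commit
  // was never set, hasAutoCommit() is false even though getAutoCommit() still reports
  // the declared default of true.
  //
  //   byte[] bytes = request.toByteArray();  // 'request' from the sketch above
  //   YarnServiceProtos.ReInitializeContainerRequestProto copy =
  //       YarnServiceProtos.ReInitializeContainerRequestProto.parseFrom(bytes);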

  public interface ReInitializeContainerResponseProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.ReInitializeContainerResponseProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
  }
  /**
   * Protobuf type {@code hadoop.yarn.ReInitializeContainerResponseProto}
   */
  public static final class ReInitializeContainerResponseProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.ReInitializeContainerResponseProto)
      ReInitializeContainerResponseProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use ReInitializeContainerResponseProto.newBuilder() to construct.
    private ReInitializeContainerResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private ReInitializeContainerResponseProto() {
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new ReInitializeContainerResponseProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReInitializeContainerResponseProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReInitializeContainerResponseProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerResponseProto.Builder.class);
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerResponseProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerResponseProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerResponseProto) obj;

      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerResponseProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerResponseProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerResponseProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerResponseProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerResponseProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerResponseProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerResponseProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerResponseProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerResponseProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.ReInitializeContainerResponseProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.ReInitializeContainerResponseProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerResponseProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReInitializeContainerResponseProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReInitializeContainerResponseProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerResponseProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerResponseProto.newBuilder()
      private Builder() {
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReInitializeContainerResponseProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerResponseProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerResponseProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerResponseProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerResponseProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerResponseProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerResponseProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerResponseProto(this);
        onBuilt();
        return result;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerResponseProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerResponseProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerResponseProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerResponseProto.getDefaultInstance()) return this;
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.ReInitializeContainerResponseProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.ReInitializeContainerResponseProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerResponseProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerResponseProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerResponseProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<ReInitializeContainerResponseProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<ReInitializeContainerResponseProto>() {
      @java.lang.Override
      public ReInitializeContainerResponseProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<ReInitializeContainerResponseProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<ReInitializeContainerResponseProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerResponseProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
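
  // Editorial sketch (not protoc output): ReInitializeContainerResponseProto
  // declares no fields, so the default instance carries no state and, per
  // getSerializedSize() above, an instance without unknown fields serializes
  // to zero bytes. The helper name is an illustrative assumption.
  private static boolean reInitResponseSerializesEmptySketch() {
    ReInitializeContainerResponseProto response =
        ReInitializeContainerResponseProto.getDefaultInstance();
    return response.getSerializedSize() == 0
        && response.toByteArray().length == 0;
  }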

  public interface RestartContainerResponseProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.RestartContainerResponseProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
  }
  /**
   * Protobuf type {@code hadoop.yarn.RestartContainerResponseProto}
   */
  public static final class RestartContainerResponseProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.RestartContainerResponseProto)
      RestartContainerResponseProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use RestartContainerResponseProto.newBuilder() to construct.
    private RestartContainerResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private RestartContainerResponseProto() {
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new RestartContainerResponseProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_RestartContainerResponseProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_RestartContainerResponseProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.RestartContainerResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.RestartContainerResponseProto.Builder.class);
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.RestartContainerResponseProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.RestartContainerResponseProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.RestartContainerResponseProto) obj;

      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RestartContainerResponseProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RestartContainerResponseProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RestartContainerResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RestartContainerResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RestartContainerResponseProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RestartContainerResponseProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RestartContainerResponseProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RestartContainerResponseProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RestartContainerResponseProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RestartContainerResponseProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RestartContainerResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RestartContainerResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.RestartContainerResponseProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.RestartContainerResponseProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.RestartContainerResponseProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.RestartContainerResponseProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_RestartContainerResponseProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_RestartContainerResponseProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.RestartContainerResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.RestartContainerResponseProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.RestartContainerResponseProto.newBuilder()
      private Builder() {
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_RestartContainerResponseProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.RestartContainerResponseProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.RestartContainerResponseProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.RestartContainerResponseProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.RestartContainerResponseProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.RestartContainerResponseProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.RestartContainerResponseProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.RestartContainerResponseProto(this);
        onBuilt();
        return result;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.RestartContainerResponseProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.RestartContainerResponseProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.RestartContainerResponseProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.RestartContainerResponseProto.getDefaultInstance()) return this;
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.RestartContainerResponseProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.RestartContainerResponseProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.RestartContainerResponseProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.RestartContainerResponseProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RestartContainerResponseProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<RestartContainerResponseProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<RestartContainerResponseProto>() {
      @java.lang.Override
      public RestartContainerResponseProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<RestartContainerResponseProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<RestartContainerResponseProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.RestartContainerResponseProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
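
  // Editorial sketch (not protoc output): writeDelimitedTo/parseDelimitedFrom
  // frame each message with a varint length prefix, so even this empty
  // response survives a stream round-trip. The helper name and in-memory
  // streams are illustrative assumptions, not part of the generated API.
  private static RestartContainerResponseProto restartResponseDelimitedSketch()
      throws java.io.IOException {
    java.io.ByteArrayOutputStream out = new java.io.ByteArrayOutputStream();
    RestartContainerResponseProto.getDefaultInstance().writeDelimitedTo(out);
    java.io.ByteArrayInputStream in =
        new java.io.ByteArrayInputStream(out.toByteArray());
    return RestartContainerResponseProto.parseDelimitedFrom(in);
  }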

  public interface RollbackResponseProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.RollbackResponseProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
  }
  /**
   * Protobuf type {@code hadoop.yarn.RollbackResponseProto}
   */
  public static final class RollbackResponseProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.RollbackResponseProto)
      RollbackResponseProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use RollbackResponseProto.newBuilder() to construct.
    private RollbackResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private RollbackResponseProto() {
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new RollbackResponseProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_RollbackResponseProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_RollbackResponseProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.RollbackResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.RollbackResponseProto.Builder.class);
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.RollbackResponseProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.RollbackResponseProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.RollbackResponseProto) obj;

      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RollbackResponseProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RollbackResponseProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RollbackResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RollbackResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RollbackResponseProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RollbackResponseProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RollbackResponseProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RollbackResponseProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RollbackResponseProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RollbackResponseProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RollbackResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RollbackResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.RollbackResponseProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.RollbackResponseProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.RollbackResponseProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.RollbackResponseProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_RollbackResponseProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_RollbackResponseProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.RollbackResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.RollbackResponseProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.RollbackResponseProto.newBuilder()
      private Builder() {
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_RollbackResponseProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.RollbackResponseProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.RollbackResponseProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.RollbackResponseProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.RollbackResponseProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.RollbackResponseProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.RollbackResponseProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.RollbackResponseProto(this);
        onBuilt();
        return result;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.RollbackResponseProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.RollbackResponseProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.RollbackResponseProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.RollbackResponseProto.getDefaultInstance()) return this;
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.RollbackResponseProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.RollbackResponseProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.RollbackResponseProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.RollbackResponseProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RollbackResponseProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<RollbackResponseProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<RollbackResponseProto>() {
      @java.lang.Override
      public RollbackResponseProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<RollbackResponseProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<RollbackResponseProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.RollbackResponseProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
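
  // Editorial sketch (not protoc output): for this field-less message,
  // equals() and hashCode() above reduce to the unknown-field set, so a
  // freshly built instance is value-equal to the default instance. The
  // helper name is an illustrative assumption.
  private static boolean rollbackResponseValueEqualitySketch() {
    RollbackResponseProto built = RollbackResponseProto.newBuilder().build();
    RollbackResponseProto def = RollbackResponseProto.getDefaultInstance();
    return built.equals(def) && built.hashCode() == def.hashCode();
  }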

  public interface CommitResponseProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.CommitResponseProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
  }
  /**
   * Protobuf type {@code hadoop.yarn.CommitResponseProto}
   */
  public static final class CommitResponseProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.CommitResponseProto)
      CommitResponseProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use CommitResponseProto.newBuilder() to construct.
    private CommitResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private CommitResponseProto() {
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new CommitResponseProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_CommitResponseProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_CommitResponseProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.CommitResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.CommitResponseProto.Builder.class);
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.CommitResponseProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.CommitResponseProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.CommitResponseProto) obj;

      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.CommitResponseProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.CommitResponseProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.CommitResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.CommitResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.CommitResponseProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.CommitResponseProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.CommitResponseProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.CommitResponseProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.CommitResponseProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.CommitResponseProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.CommitResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.CommitResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.CommitResponseProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.CommitResponseProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.CommitResponseProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.CommitResponseProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_CommitResponseProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_CommitResponseProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.CommitResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.CommitResponseProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.CommitResponseProto.newBuilder()
      private Builder() {
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_CommitResponseProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.CommitResponseProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.CommitResponseProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.CommitResponseProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.CommitResponseProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.CommitResponseProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.CommitResponseProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.CommitResponseProto(this);
        onBuilt();
        return result;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.CommitResponseProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.CommitResponseProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.CommitResponseProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.CommitResponseProto.getDefaultInstance()) return this;
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.CommitResponseProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.CommitResponseProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.CommitResponseProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.CommitResponseProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.CommitResponseProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<CommitResponseProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<CommitResponseProto>() {
      @java.lang.Override
      public CommitResponseProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<CommitResponseProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<CommitResponseProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.CommitResponseProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
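
  // Illustrative sketch, not part of the protoc-generated output (the helper
  // name is ours): CommitResponseProto declares no fields, so an instance
  // serializes to zero bytes and parsing that wire form yields a message
  // equal to the default instance.
  private static CommitResponseProto exampleCommitResponseRoundTrip()
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    byte[] wire = CommitResponseProto.newBuilder().build().toByteArray(); // zero-length
    return CommitResponseProto.parseFrom(wire); // equals(CommitResponseProto.getDefaultInstance())
  }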

  public interface StartContainersRequestProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.StartContainersRequestProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>repeated .hadoop.yarn.StartContainerRequestProto start_container_request = 1;</code>
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto> 
        getStartContainerRequestList();
    /**
     * <code>repeated .hadoop.yarn.StartContainerRequestProto start_container_request = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto getStartContainerRequest(int index);
    /**
     * <code>repeated .hadoop.yarn.StartContainerRequestProto start_container_request = 1;</code>
     */
    int getStartContainerRequestCount();
    /**
     * <code>repeated .hadoop.yarn.StartContainerRequestProto start_container_request = 1;</code>
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProtoOrBuilder> 
        getStartContainerRequestOrBuilderList();
    /**
     * <code>repeated .hadoop.yarn.StartContainerRequestProto start_container_request = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProtoOrBuilder getStartContainerRequestOrBuilder(
        int index);
  }
  /**
   * <pre>
   *&#47;/ bulk API records
   * </pre>
   *
   * Protobuf type {@code hadoop.yarn.StartContainersRequestProto}
   */
  public static final class StartContainersRequestProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.StartContainersRequestProto)
      StartContainersRequestProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use StartContainersRequestProto.newBuilder() to construct.
    private StartContainersRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private StartContainersRequestProto() {
      startContainerRequest_ = java.util.Collections.emptyList();
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new StartContainersRequestProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_StartContainersRequestProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_StartContainersRequestProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersRequestProto.Builder.class);
    }

    public static final int START_CONTAINER_REQUEST_FIELD_NUMBER = 1;
    @SuppressWarnings("serial")
    private java.util.List<org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto> startContainerRequest_;
    /**
     * <code>repeated .hadoop.yarn.StartContainerRequestProto start_container_request = 1;</code>
     */
    @java.lang.Override
    public java.util.List<org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto> getStartContainerRequestList() {
      return startContainerRequest_;
    }
    /**
     * <code>repeated .hadoop.yarn.StartContainerRequestProto start_container_request = 1;</code>
     */
    @java.lang.Override
    public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProtoOrBuilder> 
        getStartContainerRequestOrBuilderList() {
      return startContainerRequest_;
    }
    /**
     * <code>repeated .hadoop.yarn.StartContainerRequestProto start_container_request = 1;</code>
     */
    @java.lang.Override
    public int getStartContainerRequestCount() {
      return startContainerRequest_.size();
    }
    /**
     * <code>repeated .hadoop.yarn.StartContainerRequestProto start_container_request = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto getStartContainerRequest(int index) {
      return startContainerRequest_.get(index);
    }
    /**
     * <code>repeated .hadoop.yarn.StartContainerRequestProto start_container_request = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProtoOrBuilder getStartContainerRequestOrBuilder(
        int index) {
      return startContainerRequest_.get(index);
    }

    private byte memoizedIsInitialized = -1; // -1: not yet computed, 0: not initialized, 1: initialized
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      for (int i = 0; i < getStartContainerRequestCount(); i++) {
        if (!getStartContainerRequest(i).isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      for (int i = 0; i < startContainerRequest_.size(); i++) {
        output.writeMessage(1, startContainerRequest_.get(i));
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      for (int i = 0; i < startContainerRequest_.size(); i++) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, startContainerRequest_.get(i));
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersRequestProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersRequestProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersRequestProto) obj;

      if (!getStartContainerRequestList()
          .equals(other.getStartContainerRequestList())) return false;
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (getStartContainerRequestCount() > 0) {
        hash = (37 * hash) + START_CONTAINER_REQUEST_FIELD_NUMBER;
        hash = (53 * hash) + getStartContainerRequestList().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersRequestProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersRequestProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersRequestProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersRequestProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersRequestProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersRequestProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersRequestProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersRequestProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersRequestProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }
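
    // Illustrative sketch, not part of the protoc-generated output (the
    // helper name is ours): toBuilder() copies this immutable message into a
    // mutable Builder, so a modified variant can be derived without touching
    // the original instance.
    private static StartContainersRequestProto exampleAddOneMore(
        StartContainersRequestProto original,
        StartContainerRequestProto extra) {
      return original.toBuilder()
          .addStartContainerRequest(extra)
          .build();
    }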

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * <pre>
     *&#47;/ bulk API records
     * </pre>
     *
     * Protobuf type {@code hadoop.yarn.StartContainersRequestProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.StartContainersRequestProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersRequestProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_StartContainersRequestProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_StartContainersRequestProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersRequestProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersRequestProto.newBuilder()
      private Builder() {
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        if (startContainerRequestBuilder_ == null) {
          startContainerRequest_ = java.util.Collections.emptyList();
        } else {
          startContainerRequest_ = null;
          startContainerRequestBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_StartContainersRequestProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersRequestProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersRequestProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersRequestProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersRequestProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersRequestProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersRequestProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersRequestProto(this);
        buildPartialRepeatedFields(result);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartialRepeatedFields(org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersRequestProto result) {
        if (startContainerRequestBuilder_ == null) {
          if (((bitField0_ & 0x00000001) != 0)) {
            startContainerRequest_ = java.util.Collections.unmodifiableList(startContainerRequest_);
            bitField0_ = (bitField0_ & ~0x00000001);
          }
          result.startContainerRequest_ = startContainerRequest_;
        } else {
          result.startContainerRequest_ = startContainerRequestBuilder_.build();
        }
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersRequestProto result) {
        // No singular fields to copy for this message; the repeated field is
        // handled in buildPartialRepeatedFields above.
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersRequestProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersRequestProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersRequestProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersRequestProto.getDefaultInstance()) return this;
        if (startContainerRequestBuilder_ == null) {
          if (!other.startContainerRequest_.isEmpty()) {
            if (startContainerRequest_.isEmpty()) {
              startContainerRequest_ = other.startContainerRequest_;
              bitField0_ = (bitField0_ & ~0x00000001);
            } else {
              ensureStartContainerRequestIsMutable();
              startContainerRequest_.addAll(other.startContainerRequest_);
            }
            onChanged();
          }
        } else {
          if (!other.startContainerRequest_.isEmpty()) {
            if (startContainerRequestBuilder_.isEmpty()) {
              startContainerRequestBuilder_.dispose();
              startContainerRequestBuilder_ = null;
              startContainerRequest_ = other.startContainerRequest_;
              bitField0_ = (bitField0_ & ~0x00000001);
              startContainerRequestBuilder_ = 
                org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                   getStartContainerRequestFieldBuilder() : null;
            } else {
              startContainerRequestBuilder_.addAllMessages(other.startContainerRequest_);
            }
          }
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        for (int i = 0; i < getStartContainerRequestCount(); i++) {
          if (!getStartContainerRequest(i).isInitialized()) {
            return false;
          }
        }
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto m =
                    input.readMessage(
                        org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto.PARSER,
                        extensionRegistry);
                if (startContainerRequestBuilder_ == null) {
                  ensureStartContainerRequestIsMutable();
                  startContainerRequest_.add(m);
                } else {
                  startContainerRequestBuilder_.addMessage(m);
                }
                break;
              } // case 10
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_; // bit 0x00000001: startContainerRequest_ is held in a builder-owned mutable list

      private java.util.List<org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto> startContainerRequest_ =
        java.util.Collections.emptyList();
      private void ensureStartContainerRequestIsMutable() {
        if (!((bitField0_ & 0x00000001) != 0)) {
          startContainerRequest_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto>(startContainerRequest_);
          bitField0_ |= 0x00000001;
        }
      }

      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto, org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto.Builder, org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProtoOrBuilder> startContainerRequestBuilder_;

      /**
       * <code>repeated .hadoop.yarn.StartContainerRequestProto start_container_request = 1;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto> getStartContainerRequestList() {
        if (startContainerRequestBuilder_ == null) {
          return java.util.Collections.unmodifiableList(startContainerRequest_);
        } else {
          return startContainerRequestBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.StartContainerRequestProto start_container_request = 1;</code>
       */
      public int getStartContainerRequestCount() {
        if (startContainerRequestBuilder_ == null) {
          return startContainerRequest_.size();
        } else {
          return startContainerRequestBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.StartContainerRequestProto start_container_request = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto getStartContainerRequest(int index) {
        if (startContainerRequestBuilder_ == null) {
          return startContainerRequest_.get(index);
        } else {
          return startContainerRequestBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.StartContainerRequestProto start_container_request = 1;</code>
       */
      public Builder setStartContainerRequest(
          int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto value) {
        if (startContainerRequestBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureStartContainerRequestIsMutable();
          startContainerRequest_.set(index, value);
          onChanged();
        } else {
          startContainerRequestBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StartContainerRequestProto start_container_request = 1;</code>
       */
      public Builder setStartContainerRequest(
          int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto.Builder builderForValue) {
        if (startContainerRequestBuilder_ == null) {
          ensureStartContainerRequestIsMutable();
          startContainerRequest_.set(index, builderForValue.build());
          onChanged();
        } else {
          startContainerRequestBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StartContainerRequestProto start_container_request = 1;</code>
       */
      public Builder addStartContainerRequest(org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto value) {
        if (startContainerRequestBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureStartContainerRequestIsMutable();
          startContainerRequest_.add(value);
          onChanged();
        } else {
          startContainerRequestBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StartContainerRequestProto start_container_request = 1;</code>
       */
      public Builder addStartContainerRequest(
          int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto value) {
        if (startContainerRequestBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureStartContainerRequestIsMutable();
          startContainerRequest_.add(index, value);
          onChanged();
        } else {
          startContainerRequestBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StartContainerRequestProto start_container_request = 1;</code>
       */
      public Builder addStartContainerRequest(
          org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto.Builder builderForValue) {
        if (startContainerRequestBuilder_ == null) {
          ensureStartContainerRequestIsMutable();
          startContainerRequest_.add(builderForValue.build());
          onChanged();
        } else {
          startContainerRequestBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StartContainerRequestProto start_container_request = 1;</code>
       */
      public Builder addStartContainerRequest(
          int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto.Builder builderForValue) {
        if (startContainerRequestBuilder_ == null) {
          ensureStartContainerRequestIsMutable();
          startContainerRequest_.add(index, builderForValue.build());
          onChanged();
        } else {
          startContainerRequestBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StartContainerRequestProto start_container_request = 1;</code>
       */
      public Builder addAllStartContainerRequest(
          java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto> values) {
        if (startContainerRequestBuilder_ == null) {
          ensureStartContainerRequestIsMutable();
          org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
              values, startContainerRequest_);
          onChanged();
        } else {
          startContainerRequestBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StartContainerRequestProto start_container_request = 1;</code>
       */
      public Builder clearStartContainerRequest() {
        if (startContainerRequestBuilder_ == null) {
          startContainerRequest_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000001);
          onChanged();
        } else {
          startContainerRequestBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StartContainerRequestProto start_container_request = 1;</code>
       */
      public Builder removeStartContainerRequest(int index) {
        if (startContainerRequestBuilder_ == null) {
          ensureStartContainerRequestIsMutable();
          startContainerRequest_.remove(index);
          onChanged();
        } else {
          startContainerRequestBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StartContainerRequestProto start_container_request = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto.Builder getStartContainerRequestBuilder(
          int index) {
        return getStartContainerRequestFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .hadoop.yarn.StartContainerRequestProto start_container_request = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProtoOrBuilder getStartContainerRequestOrBuilder(
          int index) {
        if (startContainerRequestBuilder_ == null) {
          return startContainerRequest_.get(index);
        } else {
          return startContainerRequestBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.StartContainerRequestProto start_container_request = 1;</code>
       */
      public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProtoOrBuilder> 
           getStartContainerRequestOrBuilderList() {
        if (startContainerRequestBuilder_ != null) {
          return startContainerRequestBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(startContainerRequest_);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.StartContainerRequestProto start_container_request = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto.Builder addStartContainerRequestBuilder() {
        return getStartContainerRequestFieldBuilder().addBuilder(
            org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.StartContainerRequestProto start_container_request = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto.Builder addStartContainerRequestBuilder(
          int index) {
        return getStartContainerRequestFieldBuilder().addBuilder(
            index, org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.StartContainerRequestProto start_container_request = 1;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto.Builder> 
           getStartContainerRequestBuilderList() {
        return getStartContainerRequestFieldBuilder().getBuilderList();
      }
      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto, org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto.Builder, org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProtoOrBuilder> 
          getStartContainerRequestFieldBuilder() {
        if (startContainerRequestBuilder_ == null) {
          startContainerRequestBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto, org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto.Builder, org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProtoOrBuilder>(
                  startContainerRequest_,
                  ((bitField0_ & 0x00000001) != 0),
                  getParentForChildren(),
                  isClean());
          startContainerRequest_ = null;
        }
        return startContainerRequestBuilder_;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.StartContainersRequestProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.StartContainersRequestProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersRequestProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersRequestProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersRequestProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<StartContainersRequestProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<StartContainersRequestProto>() {
      @java.lang.Override
      public StartContainersRequestProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<StartContainersRequestProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<StartContainersRequestProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersRequestProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
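
  // Illustrative sketch, not part of the protoc-generated output (the helper
  // name is ours): the bulk request simply wraps the repeated
  // start_container_request field, so a caller batches several per-container
  // requests into a single RPC payload.
  private static StartContainersRequestProto exampleBulkStartRequest(
      java.util.List<StartContainerRequestProto> requests) {
    return StartContainersRequestProto.newBuilder()
        .addAllStartContainerRequest(requests) // repeated field 1
        .build();
  }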

  public interface ContainerExceptionMapProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.ContainerExceptionMapProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
     * @return Whether the containerId field is set.
     */
    boolean hasContainerId();
    /**
     * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
     * @return The containerId.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getContainerId();
    /**
     * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getContainerIdOrBuilder();

    /**
     * <code>optional .hadoop.yarn.SerializedExceptionProto exception = 2;</code>
     * @return Whether the exception field is set.
     */
    boolean hasException();
    /**
     * <code>optional .hadoop.yarn.SerializedExceptionProto exception = 2;</code>
     * @return The exception.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto getException();
    /**
     * <code>optional .hadoop.yarn.SerializedExceptionProto exception = 2;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProtoOrBuilder getExceptionOrBuilder();
  }
  /**
   * Protobuf type {@code hadoop.yarn.ContainerExceptionMapProto}
   */
  public static final class ContainerExceptionMapProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.ContainerExceptionMapProto)
      ContainerExceptionMapProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use ContainerExceptionMapProto.newBuilder() to construct.
    private ContainerExceptionMapProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private ContainerExceptionMapProto() {
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new ContainerExceptionMapProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ContainerExceptionMapProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ContainerExceptionMapProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder.class);
    }

    private int bitField0_; // field presence bits: 0x00000001 container_id, 0x00000002 exception
    public static final int CONTAINER_ID_FIELD_NUMBER = 1;
    private org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto containerId_;
    /**
     * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
     * @return Whether the containerId field is set.
     */
    @java.lang.Override
    public boolean hasContainerId() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
     * @return The containerId.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getContainerId() {
      return containerId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : containerId_;
    }
    /**
     * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getContainerIdOrBuilder() {
      return containerId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : containerId_;
    }

    public static final int EXCEPTION_FIELD_NUMBER = 2;
    private org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto exception_;
    /**
     * <code>optional .hadoop.yarn.SerializedExceptionProto exception = 2;</code>
     * @return Whether the exception field is set.
     */
    @java.lang.Override
    public boolean hasException() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.SerializedExceptionProto exception = 2;</code>
     * @return The exception.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto getException() {
      return exception_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto.getDefaultInstance() : exception_;
    }
    /**
     * <code>optional .hadoop.yarn.SerializedExceptionProto exception = 2;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProtoOrBuilder getExceptionOrBuilder() {
      return exception_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto.getDefaultInstance() : exception_;
    }
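
    // Illustrative sketch, not part of the protoc-generated output (the
    // helper name is ours): with proto2 optional message fields, has*
    // reports explicit presence while get* falls back to the field type's
    // default instance, so presence must be checked before trusting a value.
    private static org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto
        exampleExceptionOrNull(ContainerExceptionMapProto entry) {
      return entry.hasException() ? entry.getException() : null;
    }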

    private byte memoizedIsInitialized = -1; // -1: not yet computed, 0: not initialized, 1: initialized
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeMessage(1, getContainerId());
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        output.writeMessage(2, getException());
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, getContainerId());
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(2, getException());
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto) obj;

      if (hasContainerId() != other.hasContainerId()) return false;
      if (hasContainerId()) {
        if (!getContainerId()
            .equals(other.getContainerId())) return false;
      }
      if (hasException() != other.hasException()) return false;
      if (hasException()) {
        if (!getException()
            .equals(other.getException())) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasContainerId()) {
        hash = (37 * hash) + CONTAINER_ID_FIELD_NUMBER;
        hash = (53 * hash) + getContainerId().hashCode();
      }
      if (hasException()) {
        hash = (37 * hash) + EXCEPTION_FIELD_NUMBER;
        hash = (53 * hash) + getException().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }
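    // A minimal usage sketch (illustrative only; variable names are
    // placeholders): any of the parseFrom overloads above round-trips with
    // toByteArray().
    //
    //   ContainerExceptionMapProto original = ContainerExceptionMapProto
    //       .newBuilder()
    //       .setContainerId(someContainerIdProto) // placeholder ContainerIdProto
    //       .build();
    //   byte[] wire = original.toByteArray();
    //   ContainerExceptionMapProto parsed =
    //       ContainerExceptionMapProto.parseFrom(wire);
    //   // parsed.equals(original) holds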

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.ContainerExceptionMapProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.ContainerExceptionMapProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ContainerExceptionMapProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ContainerExceptionMapProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
          getContainerIdFieldBuilder();
          getExceptionFieldBuilder();
        }
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        containerId_ = null;
        if (containerIdBuilder_ != null) {
          containerIdBuilder_.dispose();
          containerIdBuilder_ = null;
        }
        exception_ = null;
        if (exceptionBuilder_ != null) {
          exceptionBuilder_.dispose();
          exceptionBuilder_ = null;
        }
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ContainerExceptionMapProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.containerId_ = containerIdBuilder_ == null
              ? containerId_
              : containerIdBuilder_.build();
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.exception_ = exceptionBuilder_ == null
              ? exception_
              : exceptionBuilder_.build();
          to_bitField0_ |= 0x00000002;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.getDefaultInstance()) return this;
        if (other.hasContainerId()) {
          mergeContainerId(other.getContainerId());
        }
        if (other.hasException()) {
          mergeException(other.getException());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
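            // tag = (field_number << 3) | wire_type; the cases 10 and 18
            // below are fields 1 and 2 as length-delimited messages.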
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                input.readMessage(
                    getContainerIdFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
              case 18: {
                input.readMessage(
                    getExceptionFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;
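      // bitField0_ tracks which optional fields have been explicitly set on
      // this builder: 0x00000001 = container_id, 0x00000002 = exception.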

      private org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto containerId_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> containerIdBuilder_;
      /**
       * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       * @return Whether the containerId field is set.
       */
      public boolean hasContainerId() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       * @return The containerId.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getContainerId() {
        if (containerIdBuilder_ == null) {
          return containerId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : containerId_;
        } else {
          return containerIdBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public Builder setContainerId(org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) {
        if (containerIdBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          containerId_ = value;
        } else {
          containerIdBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public Builder setContainerId(
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder builderForValue) {
        if (containerIdBuilder_ == null) {
          containerId_ = builderForValue.build();
        } else {
          containerIdBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public Builder mergeContainerId(org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) {
        if (containerIdBuilder_ == null) {
          if (((bitField0_ & 0x00000001) != 0) &&
            containerId_ != null &&
            containerId_ != org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance()) {
            getContainerIdBuilder().mergeFrom(value);
          } else {
            containerId_ = value;
          }
        } else {
          containerIdBuilder_.mergeFrom(value);
        }
        if (containerId_ != null) {
          bitField0_ |= 0x00000001;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public Builder clearContainerId() {
        bitField0_ = (bitField0_ & ~0x00000001);
        containerId_ = null;
        if (containerIdBuilder_ != null) {
          containerIdBuilder_.dispose();
          containerIdBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder getContainerIdBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getContainerIdFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getContainerIdOrBuilder() {
        if (containerIdBuilder_ != null) {
          return containerIdBuilder_.getMessageOrBuilder();
        } else {
          return containerId_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : containerId_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> 
          getContainerIdFieldBuilder() {
        if (containerIdBuilder_ == null) {
          containerIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder>(
                  getContainerId(),
                  getParentForChildren(),
                  isClean());
          containerId_ = null;
        }
        return containerIdBuilder_;
      }

      private org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto exception_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto, org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProtoOrBuilder> exceptionBuilder_;
      /**
       * <code>optional .hadoop.yarn.SerializedExceptionProto exception = 2;</code>
       * @return Whether the exception field is set.
       */
      public boolean hasException() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.SerializedExceptionProto exception = 2;</code>
       * @return The exception.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto getException() {
        if (exceptionBuilder_ == null) {
          return exception_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto.getDefaultInstance() : exception_;
        } else {
          return exceptionBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.SerializedExceptionProto exception = 2;</code>
       */
      public Builder setException(org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto value) {
        if (exceptionBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          exception_ = value;
        } else {
          exceptionBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.SerializedExceptionProto exception = 2;</code>
       */
      public Builder setException(
          org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto.Builder builderForValue) {
        if (exceptionBuilder_ == null) {
          exception_ = builderForValue.build();
        } else {
          exceptionBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.SerializedExceptionProto exception = 2;</code>
       */
      public Builder mergeException(org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto value) {
        if (exceptionBuilder_ == null) {
          if (((bitField0_ & 0x00000002) != 0) &&
            exception_ != null &&
            exception_ != org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto.getDefaultInstance()) {
            getExceptionBuilder().mergeFrom(value);
          } else {
            exception_ = value;
          }
        } else {
          exceptionBuilder_.mergeFrom(value);
        }
        if (exception_ != null) {
          bitField0_ |= 0x00000002;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.SerializedExceptionProto exception = 2;</code>
       */
      public Builder clearException() {
        bitField0_ = (bitField0_ & ~0x00000002);
        exception_ = null;
        if (exceptionBuilder_ != null) {
          exceptionBuilder_.dispose();
          exceptionBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.SerializedExceptionProto exception = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto.Builder getExceptionBuilder() {
        bitField0_ |= 0x00000002;
        onChanged();
        return getExceptionFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.SerializedExceptionProto exception = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProtoOrBuilder getExceptionOrBuilder() {
        if (exceptionBuilder_ != null) {
          return exceptionBuilder_.getMessageOrBuilder();
        } else {
          return exception_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto.getDefaultInstance() : exception_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.SerializedExceptionProto exception = 2;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto, org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProtoOrBuilder> 
          getExceptionFieldBuilder() {
        if (exceptionBuilder_ == null) {
          exceptionBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto, org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProtoOrBuilder>(
                  getException(),
                  getParentForChildren(),
                  isClean());
          exception_ = null;
        }
        return exceptionBuilder_;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }

      // @@protoc_insertion_point(builder_scope:hadoop.yarn.ContainerExceptionMapProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.ContainerExceptionMapProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<ContainerExceptionMapProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<ContainerExceptionMapProto>() {
      @java.lang.Override
      public ContainerExceptionMapProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<ContainerExceptionMapProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<ContainerExceptionMapProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
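  // A minimal usage sketch (illustrative; `entry` is a placeholder): the
  // has*/get* pair distinguishes an unset optional sub-message from its
  // default instance.
  //
  //   ContainerExceptionMapProto entry = ...;
  //   if (entry.hasException()) {
  //     org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto ex =
  //         entry.getException();
  //     // inspect ex
  //   }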

  public interface StartContainersResponseProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.StartContainersResponseProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1;</code>
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto> 
        getServicesMetaDataList();
    /**
     * <code>repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto getServicesMetaData(int index);
    /**
     * <code>repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1;</code>
     */
    int getServicesMetaDataCount();
    /**
     * <code>repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1;</code>
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProtoOrBuilder> 
        getServicesMetaDataOrBuilderList();
    /**
     * <code>repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProtoOrBuilder getServicesMetaDataOrBuilder(
        int index);

    /**
     * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 2;</code>
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto> 
        getSucceededRequestsList();
    /**
     * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 2;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getSucceededRequests(int index);
    /**
     * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 2;</code>
     */
    int getSucceededRequestsCount();
    /**
     * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 2;</code>
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> 
        getSucceededRequestsOrBuilderList();
    /**
     * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 2;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getSucceededRequestsOrBuilder(
        int index);

    /**
     * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 3;</code>
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto> 
        getFailedRequestsList();
    /**
     * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 3;</code>
     */
    org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto getFailedRequests(int index);
    /**
     * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 3;</code>
     */
    int getFailedRequestsCount();
    /**
     * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 3;</code>
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProtoOrBuilder> 
        getFailedRequestsOrBuilderList();
    /**
     * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 3;</code>
     */
    org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProtoOrBuilder getFailedRequestsOrBuilder(
        int index);
  }
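  // A minimal usage sketch (illustrative; `resp` is a placeholder): the
  // interface exposes its three repeated fields by count and index, so a
  // caller can walk the per-container outcomes without the concrete class.
  //
  //   StartContainersResponseProtoOrBuilder resp = ...;
  //   for (int i = 0; i < resp.getFailedRequestsCount(); i++) {
  //     ContainerExceptionMapProto failed = resp.getFailedRequests(i);
  //     // failed.getContainerId() identifies the container,
  //     // failed.getException() carries the serialized cause
  //   }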
  /**
   * Protobuf type {@code hadoop.yarn.StartContainersResponseProto}
   */
  public static final class StartContainersResponseProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.StartContainersResponseProto)
      StartContainersResponseProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use StartContainersResponseProto.newBuilder() to construct.
    private StartContainersResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private StartContainersResponseProto() {
      servicesMetaData_ = java.util.Collections.emptyList();
      succeededRequests_ = java.util.Collections.emptyList();
      failedRequests_ = java.util.Collections.emptyList();
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new StartContainersResponseProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_StartContainersResponseProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_StartContainersResponseProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersResponseProto.Builder.class);
    }

    public static final int SERVICES_META_DATA_FIELD_NUMBER = 1;
    @SuppressWarnings("serial")
    private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto> servicesMetaData_;
    /**
     * <code>repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1;</code>
     */
    @java.lang.Override
    public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto> getServicesMetaDataList() {
      return servicesMetaData_;
    }
    /**
     * <code>repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1;</code>
     */
    @java.lang.Override
    public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProtoOrBuilder> 
        getServicesMetaDataOrBuilderList() {
      return servicesMetaData_;
    }
    /**
     * <code>repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1;</code>
     */
    @java.lang.Override
    public int getServicesMetaDataCount() {
      return servicesMetaData_.size();
    }
    /**
     * <code>repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto getServicesMetaData(int index) {
      return servicesMetaData_.get(index);
    }
    /**
     * <code>repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProtoOrBuilder getServicesMetaDataOrBuilder(
        int index) {
      return servicesMetaData_.get(index);
    }

    public static final int SUCCEEDED_REQUESTS_FIELD_NUMBER = 2;
    @SuppressWarnings("serial")
    private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto> succeededRequests_;
    /**
     * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 2;</code>
     */
    @java.lang.Override
    public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto> getSucceededRequestsList() {
      return succeededRequests_;
    }
    /**
     * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 2;</code>
     */
    @java.lang.Override
    public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> 
        getSucceededRequestsOrBuilderList() {
      return succeededRequests_;
    }
    /**
     * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 2;</code>
     */
    @java.lang.Override
    public int getSucceededRequestsCount() {
      return succeededRequests_.size();
    }
    /**
     * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 2;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getSucceededRequests(int index) {
      return succeededRequests_.get(index);
    }
    /**
     * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 2;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getSucceededRequestsOrBuilder(
        int index) {
      return succeededRequests_.get(index);
    }

    public static final int FAILED_REQUESTS_FIELD_NUMBER = 3;
    @SuppressWarnings("serial")
    private java.util.List<org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto> failedRequests_;
    /**
     * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 3;</code>
     */
    @java.lang.Override
    public java.util.List<org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto> getFailedRequestsList() {
      return failedRequests_;
    }
    /**
     * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 3;</code>
     */
    @java.lang.Override
    public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProtoOrBuilder> 
        getFailedRequestsOrBuilderList() {
      return failedRequests_;
    }
    /**
     * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 3;</code>
     */
    @java.lang.Override
    public int getFailedRequestsCount() {
      return failedRequests_.size();
    }
    /**
     * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 3;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto getFailedRequests(int index) {
      return failedRequests_.get(index);
    }
    /**
     * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 3;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProtoOrBuilder getFailedRequestsOrBuilder(
        int index) {
      return failedRequests_.get(index);
    }

    private byte memoizedIsInitialized = -1;
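    // memoizedIsInitialized caches the initialization check:
    // -1 = not yet computed, 0 = missing required fields, 1 = initialized.
    // Every field of this message is optional or repeated, so the check
    // below always succeeds and is cached as 1.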
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      for (int i = 0; i < servicesMetaData_.size(); i++) {
        output.writeMessage(1, servicesMetaData_.get(i));
      }
      for (int i = 0; i < succeededRequests_.size(); i++) {
        output.writeMessage(2, succeededRequests_.get(i));
      }
      for (int i = 0; i < failedRequests_.size(); i++) {
        output.writeMessage(3, failedRequests_.get(i));
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      for (int i = 0; i < servicesMetaData_.size(); i++) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, servicesMetaData_.get(i));
      }
      for (int i = 0; i < succeededRequests_.size(); i++) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(2, succeededRequests_.get(i));
      }
      for (int i = 0; i < failedRequests_.size(); i++) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(3, failedRequests_.get(i));
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersResponseProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersResponseProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersResponseProto) obj;

      if (!getServicesMetaDataList()
          .equals(other.getServicesMetaDataList())) return false;
      if (!getSucceededRequestsList()
          .equals(other.getSucceededRequestsList())) return false;
      if (!getFailedRequestsList()
          .equals(other.getFailedRequestsList())) return false;
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (getServicesMetaDataCount() > 0) {
        hash = (37 * hash) + SERVICES_META_DATA_FIELD_NUMBER;
        hash = (53 * hash) + getServicesMetaDataList().hashCode();
      }
      if (getSucceededRequestsCount() > 0) {
        hash = (37 * hash) + SUCCEEDED_REQUESTS_FIELD_NUMBER;
        hash = (53 * hash) + getSucceededRequestsList().hashCode();
      }
      if (getFailedRequestsCount() > 0) {
        hash = (37 * hash) + FAILED_REQUESTS_FIELD_NUMBER;
        hash = (53 * hash) + getFailedRequestsList().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersResponseProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersResponseProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersResponseProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersResponseProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersResponseProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersResponseProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersResponseProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersResponseProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }
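    // A minimal usage sketch (illustrative; `in` is a placeholder stream):
    // parseDelimitedFrom reads one length-prefixed message per call (the
    // counterpart of writeDelimitedTo) and returns null at end of stream,
    // whereas parseFrom(InputStream) consumes the stream to EOF as a single
    // message.
    //
    //   java.io.InputStream in = ...;
    //   StartContainersResponseProto msg;
    //   while ((msg = StartContainersResponseProto.parseDelimitedFrom(in)) != null) {
    //     // handle msg
    //   }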

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersResponseProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.StartContainersResponseProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.StartContainersResponseProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersResponseProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_StartContainersResponseProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_StartContainersResponseProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersResponseProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersResponseProto.newBuilder()
      private Builder() {
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        if (servicesMetaDataBuilder_ == null) {
          servicesMetaData_ = java.util.Collections.emptyList();
        } else {
          servicesMetaData_ = null;
          servicesMetaDataBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        if (succeededRequestsBuilder_ == null) {
          succeededRequests_ = java.util.Collections.emptyList();
        } else {
          succeededRequests_ = null;
          succeededRequestsBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000002);
        if (failedRequestsBuilder_ == null) {
          failedRequests_ = java.util.Collections.emptyList();
        } else {
          failedRequests_ = null;
          failedRequestsBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000004);
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_StartContainersResponseProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersResponseProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersResponseProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersResponseProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersResponseProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersResponseProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersResponseProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersResponseProto(this);
        buildPartialRepeatedFields(result);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartialRepeatedFields(org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersResponseProto result) {
        if (servicesMetaDataBuilder_ == null) {
          if (((bitField0_ & 0x00000001) != 0)) {
            servicesMetaData_ = java.util.Collections.unmodifiableList(servicesMetaData_);
            bitField0_ = (bitField0_ & ~0x00000001);
          }
          result.servicesMetaData_ = servicesMetaData_;
        } else {
          result.servicesMetaData_ = servicesMetaDataBuilder_.build();
        }
        if (succeededRequestsBuilder_ == null) {
          if (((bitField0_ & 0x00000002) != 0)) {
            succeededRequests_ = java.util.Collections.unmodifiableList(succeededRequests_);
            bitField0_ = (bitField0_ & ~0x00000002);
          }
          result.succeededRequests_ = succeededRequests_;
        } else {
          result.succeededRequests_ = succeededRequestsBuilder_.build();
        }
        if (failedRequestsBuilder_ == null) {
          if (((bitField0_ & 0x00000004) != 0)) {
            failedRequests_ = java.util.Collections.unmodifiableList(failedRequests_);
            bitField0_ = (bitField0_ & ~0x00000004);
          }
          result.failedRequests_ = failedRequests_;
        } else {
          result.failedRequests_ = failedRequestsBuilder_.build();
        }
      }
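      // buildPartialRepeatedFields transfers ownership of the backing lists:
      // each list is wrapped in unmodifiableList and its mutability bit is
      // cleared, so a later ensure*IsMutable() call copies the list rather
      // than mutating the already-built message.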

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersResponseProto result) {
        int from_bitField0_ = bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersResponseProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersResponseProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersResponseProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersResponseProto.getDefaultInstance()) return this;
        if (servicesMetaDataBuilder_ == null) {
          if (!other.servicesMetaData_.isEmpty()) {
            if (servicesMetaData_.isEmpty()) {
              servicesMetaData_ = other.servicesMetaData_;
              bitField0_ = (bitField0_ & ~0x00000001);
            } else {
              ensureServicesMetaDataIsMutable();
              servicesMetaData_.addAll(other.servicesMetaData_);
            }
            onChanged();
          }
        } else {
          if (!other.servicesMetaData_.isEmpty()) {
            if (servicesMetaDataBuilder_.isEmpty()) {
              servicesMetaDataBuilder_.dispose();
              servicesMetaDataBuilder_ = null;
              servicesMetaData_ = other.servicesMetaData_;
              bitField0_ = (bitField0_ & ~0x00000001);
              servicesMetaDataBuilder_ = 
                org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                   getServicesMetaDataFieldBuilder() : null;
            } else {
              servicesMetaDataBuilder_.addAllMessages(other.servicesMetaData_);
            }
          }
        }
        if (succeededRequestsBuilder_ == null) {
          if (!other.succeededRequests_.isEmpty()) {
            if (succeededRequests_.isEmpty()) {
              succeededRequests_ = other.succeededRequests_;
              bitField0_ = (bitField0_ & ~0x00000002);
            } else {
              ensureSucceededRequestsIsMutable();
              succeededRequests_.addAll(other.succeededRequests_);
            }
            onChanged();
          }
        } else {
          if (!other.succeededRequests_.isEmpty()) {
            if (succeededRequestsBuilder_.isEmpty()) {
              succeededRequestsBuilder_.dispose();
              succeededRequestsBuilder_ = null;
              succeededRequests_ = other.succeededRequests_;
              bitField0_ = (bitField0_ & ~0x00000002);
              succeededRequestsBuilder_ = 
                org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                   getSucceededRequestsFieldBuilder() : null;
            } else {
              succeededRequestsBuilder_.addAllMessages(other.succeededRequests_);
            }
          }
        }
        if (failedRequestsBuilder_ == null) {
          if (!other.failedRequests_.isEmpty()) {
            if (failedRequests_.isEmpty()) {
              failedRequests_ = other.failedRequests_;
              bitField0_ = (bitField0_ & ~0x00000004);
            } else {
              ensureFailedRequestsIsMutable();
              failedRequests_.addAll(other.failedRequests_);
            }
            onChanged();
          }
        } else {
          if (!other.failedRequests_.isEmpty()) {
            if (failedRequestsBuilder_.isEmpty()) {
              failedRequestsBuilder_.dispose();
              failedRequestsBuilder_ = null;
              failedRequests_ = other.failedRequests_;
              bitField0_ = (bitField0_ & ~0x00000004);
              failedRequestsBuilder_ = 
                org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                   getFailedRequestsFieldBuilder() : null;
            } else {
              failedRequestsBuilder_.addAllMessages(other.failedRequests_);
            }
          }
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto m =
                    input.readMessage(
                        org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto.PARSER,
                        extensionRegistry);
                if (servicesMetaDataBuilder_ == null) {
                  ensureServicesMetaDataIsMutable();
                  servicesMetaData_.add(m);
                } else {
                  servicesMetaDataBuilder_.addMessage(m);
                }
                break;
              } // case 10
              case 18: {
                org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto m =
                    input.readMessage(
                        org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.PARSER,
                        extensionRegistry);
                if (succeededRequestsBuilder_ == null) {
                  ensureSucceededRequestsIsMutable();
                  succeededRequests_.add(m);
                } else {
                  succeededRequestsBuilder_.addMessage(m);
                }
                break;
              } // case 18
              case 26: {
                org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto m =
                    input.readMessage(
                        org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.PARSER,
                        extensionRegistry);
                if (failedRequestsBuilder_ == null) {
                  ensureFailedRequestsIsMutable();
                  failedRequests_.add(m);
                } else {
                  failedRequestsBuilder_.addMessage(m);
                }
                break;
              } // case 26
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

      private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto> servicesMetaData_ =
        java.util.Collections.emptyList();
      private void ensureServicesMetaDataIsMutable() {
        if (!((bitField0_ & 0x00000001) != 0)) {
          servicesMetaData_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto>(servicesMetaData_);
          bitField0_ |= 0x00000001;
        }
      }

      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto, org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProtoOrBuilder> servicesMetaDataBuilder_;

      /**
       * <code>repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto> getServicesMetaDataList() {
        if (servicesMetaDataBuilder_ == null) {
          return java.util.Collections.unmodifiableList(servicesMetaData_);
        } else {
          return servicesMetaDataBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1;</code>
       */
      public int getServicesMetaDataCount() {
        if (servicesMetaDataBuilder_ == null) {
          return servicesMetaData_.size();
        } else {
          return servicesMetaDataBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto getServicesMetaData(int index) {
        if (servicesMetaDataBuilder_ == null) {
          return servicesMetaData_.get(index);
        } else {
          return servicesMetaDataBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1;</code>
       */
      public Builder setServicesMetaData(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto value) {
        if (servicesMetaDataBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureServicesMetaDataIsMutable();
          servicesMetaData_.set(index, value);
          onChanged();
        } else {
          servicesMetaDataBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1;</code>
       */
      public Builder setServicesMetaData(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto.Builder builderForValue) {
        if (servicesMetaDataBuilder_ == null) {
          ensureServicesMetaDataIsMutable();
          servicesMetaData_.set(index, builderForValue.build());
          onChanged();
        } else {
          servicesMetaDataBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1;</code>
       */
      public Builder addServicesMetaData(org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto value) {
        if (servicesMetaDataBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureServicesMetaDataIsMutable();
          servicesMetaData_.add(value);
          onChanged();
        } else {
          servicesMetaDataBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1;</code>
       */
      public Builder addServicesMetaData(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto value) {
        if (servicesMetaDataBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureServicesMetaDataIsMutable();
          servicesMetaData_.add(index, value);
          onChanged();
        } else {
          servicesMetaDataBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1;</code>
       */
      public Builder addServicesMetaData(
          org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto.Builder builderForValue) {
        if (servicesMetaDataBuilder_ == null) {
          ensureServicesMetaDataIsMutable();
          servicesMetaData_.add(builderForValue.build());
          onChanged();
        } else {
          servicesMetaDataBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1;</code>
       */
      public Builder addServicesMetaData(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto.Builder builderForValue) {
        if (servicesMetaDataBuilder_ == null) {
          ensureServicesMetaDataIsMutable();
          servicesMetaData_.add(index, builderForValue.build());
          onChanged();
        } else {
          servicesMetaDataBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1;</code>
       */
      public Builder addAllServicesMetaData(
          java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto> values) {
        if (servicesMetaDataBuilder_ == null) {
          ensureServicesMetaDataIsMutable();
          org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
              values, servicesMetaData_);
          onChanged();
        } else {
          servicesMetaDataBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1;</code>
       */
      public Builder clearServicesMetaData() {
        if (servicesMetaDataBuilder_ == null) {
          servicesMetaData_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000001);
          onChanged();
        } else {
          servicesMetaDataBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1;</code>
       */
      public Builder removeServicesMetaData(int index) {
        if (servicesMetaDataBuilder_ == null) {
          ensureServicesMetaDataIsMutable();
          servicesMetaData_.remove(index);
          onChanged();
        } else {
          servicesMetaDataBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto.Builder getServicesMetaDataBuilder(
          int index) {
        return getServicesMetaDataFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProtoOrBuilder getServicesMetaDataOrBuilder(
          int index) {
        if (servicesMetaDataBuilder_ == null) {
          return servicesMetaData_.get(index);
        } else {
          return servicesMetaDataBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1;</code>
       */
      public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProtoOrBuilder> 
           getServicesMetaDataOrBuilderList() {
        if (servicesMetaDataBuilder_ != null) {
          return servicesMetaDataBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(servicesMetaData_);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto.Builder addServicesMetaDataBuilder() {
        return getServicesMetaDataFieldBuilder().addBuilder(
            org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto.Builder addServicesMetaDataBuilder(
          int index) {
        return getServicesMetaDataFieldBuilder().addBuilder(
            index, org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto.Builder> 
           getServicesMetaDataBuilderList() {
        return getServicesMetaDataFieldBuilder().getBuilderList();
      }
      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto, org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProtoOrBuilder> 
          getServicesMetaDataFieldBuilder() {
        if (servicesMetaDataBuilder_ == null) {
          servicesMetaDataBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto, org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProtoOrBuilder>(
                  servicesMetaData_,
                  ((bitField0_ & 0x00000001) != 0),
                  getParentForChildren(),
                  isClean());
          servicesMetaData_ = null;
        }
        return servicesMetaDataBuilder_;
      }
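
      // State note: services_meta_data lives either in the plain list above
      // (before any sub-builder is requested) or inside the
      // RepeatedFieldBuilderV3 created lazily by
      // getServicesMetaDataFieldBuilder(), which takes ownership and nulls
      // the list reference. A population sketch (illustrative only; the
      // key/value setters are assumed from StringBytesMapProto in
      // yarn_protos.proto):
      //
      //   StartContainersResponseProto response =
      //       StartContainersResponseProto.newBuilder()
      //           .addServicesMetaData(
      //               org.apache.hadoop.yarn.proto.YarnProtos
      //                   .StringBytesMapProto.newBuilder()
      //                   .setKey("mapreduce_shuffle")
      //                   .setValue(org.apache.hadoop.thirdparty.protobuf
      //                       .ByteString.copyFromUtf8("13562")))
      //           .build();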

      private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto> succeededRequests_ =
        java.util.Collections.emptyList();
      private void ensureSucceededRequestsIsMutable() {
        if (!((bitField0_ & 0x00000002) != 0)) {
          succeededRequests_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto>(succeededRequests_);
          bitField0_ |= 0x00000002;
        }
      }

      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> succeededRequestsBuilder_;

      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 2;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto> getSucceededRequestsList() {
        if (succeededRequestsBuilder_ == null) {
          return java.util.Collections.unmodifiableList(succeededRequests_);
        } else {
          return succeededRequestsBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 2;</code>
       */
      public int getSucceededRequestsCount() {
        if (succeededRequestsBuilder_ == null) {
          return succeededRequests_.size();
        } else {
          return succeededRequestsBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getSucceededRequests(int index) {
        if (succeededRequestsBuilder_ == null) {
          return succeededRequests_.get(index);
        } else {
          return succeededRequestsBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 2;</code>
       */
      public Builder setSucceededRequests(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) {
        if (succeededRequestsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureSucceededRequestsIsMutable();
          succeededRequests_.set(index, value);
          onChanged();
        } else {
          succeededRequestsBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 2;</code>
       */
      public Builder setSucceededRequests(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder builderForValue) {
        if (succeededRequestsBuilder_ == null) {
          ensureSucceededRequestsIsMutable();
          succeededRequests_.set(index, builderForValue.build());
          onChanged();
        } else {
          succeededRequestsBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 2;</code>
       */
      public Builder addSucceededRequests(org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) {
        if (succeededRequestsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureSucceededRequestsIsMutable();
          succeededRequests_.add(value);
          onChanged();
        } else {
          succeededRequestsBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 2;</code>
       */
      public Builder addSucceededRequests(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) {
        if (succeededRequestsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureSucceededRequestsIsMutable();
          succeededRequests_.add(index, value);
          onChanged();
        } else {
          succeededRequestsBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 2;</code>
       */
      public Builder addSucceededRequests(
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder builderForValue) {
        if (succeededRequestsBuilder_ == null) {
          ensureSucceededRequestsIsMutable();
          succeededRequests_.add(builderForValue.build());
          onChanged();
        } else {
          succeededRequestsBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 2;</code>
       */
      public Builder addSucceededRequests(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder builderForValue) {
        if (succeededRequestsBuilder_ == null) {
          ensureSucceededRequestsIsMutable();
          succeededRequests_.add(index, builderForValue.build());
          onChanged();
        } else {
          succeededRequestsBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 2;</code>
       */
      public Builder addAllSucceededRequests(
          java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto> values) {
        if (succeededRequestsBuilder_ == null) {
          ensureSucceededRequestsIsMutable();
          org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
              values, succeededRequests_);
          onChanged();
        } else {
          succeededRequestsBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 2;</code>
       */
      public Builder clearSucceededRequests() {
        if (succeededRequestsBuilder_ == null) {
          succeededRequests_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000002);
          onChanged();
        } else {
          succeededRequestsBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 2;</code>
       */
      public Builder removeSucceededRequests(int index) {
        if (succeededRequestsBuilder_ == null) {
          ensureSucceededRequestsIsMutable();
          succeededRequests_.remove(index);
          onChanged();
        } else {
          succeededRequestsBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder getSucceededRequestsBuilder(
          int index) {
        return getSucceededRequestsFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getSucceededRequestsOrBuilder(
          int index) {
        if (succeededRequestsBuilder_ == null) {
          return succeededRequests_.get(index);
        } else {
          return succeededRequestsBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 2;</code>
       */
      public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> 
           getSucceededRequestsOrBuilderList() {
        if (succeededRequestsBuilder_ != null) {
          return succeededRequestsBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(succeededRequests_);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder addSucceededRequestsBuilder() {
        return getSucceededRequestsFieldBuilder().addBuilder(
            org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder addSucceededRequestsBuilder(
          int index) {
        return getSucceededRequestsFieldBuilder().addBuilder(
            index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 2;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder> 
           getSucceededRequestsBuilderList() {
        return getSucceededRequestsFieldBuilder().getBuilderList();
      }
      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> 
          getSucceededRequestsFieldBuilder() {
        if (succeededRequestsBuilder_ == null) {
          succeededRequestsBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder>(
                  succeededRequests_,
                  ((bitField0_ & 0x00000002) != 0),
                  getParentForChildren(),
                  isClean());
          succeededRequests_ = null;
        }
        return succeededRequestsBuilder_;
      }
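
      // Copy-on-write note: bit 0x00000002 of bitField0_ records whether
      // succeededRequests_ is already a private ArrayList; until
      // ensureSucceededRequestsIsMutable() flips it, the list may be shared
      // with the message this builder was seeded from. For element-level
      // edits the field builder is the in-place path; a sketch, assuming
      // ContainerIdProto's int64 "id" setter from yarn_protos.proto, an
      // existing message named response, and a non-empty list:
      //
      //   StartContainersResponseProto.Builder b = response.toBuilder();
      //   b.getSucceededRequestsBuilder(0).setId(42L);  // mutates element 0
      //   StartContainersResponseProto updated = b.build();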

      private java.util.List<org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto> failedRequests_ =
        java.util.Collections.emptyList();
      private void ensureFailedRequestsIsMutable() {
        if (!((bitField0_ & 0x00000004) != 0)) {
          failedRequests_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto>(failedRequests_);
          bitField0_ |= 0x00000004;
        }
      }

      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProtoOrBuilder> failedRequestsBuilder_;

      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 3;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto> getFailedRequestsList() {
        if (failedRequestsBuilder_ == null) {
          return java.util.Collections.unmodifiableList(failedRequests_);
        } else {
          return failedRequestsBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 3;</code>
       */
      public int getFailedRequestsCount() {
        if (failedRequestsBuilder_ == null) {
          return failedRequests_.size();
        } else {
          return failedRequestsBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 3;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto getFailedRequests(int index) {
        if (failedRequestsBuilder_ == null) {
          return failedRequests_.get(index);
        } else {
          return failedRequestsBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 3;</code>
       */
      public Builder setFailedRequests(
          int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto value) {
        if (failedRequestsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureFailedRequestsIsMutable();
          failedRequests_.set(index, value);
          onChanged();
        } else {
          failedRequestsBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 3;</code>
       */
      public Builder setFailedRequests(
          int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder builderForValue) {
        if (failedRequestsBuilder_ == null) {
          ensureFailedRequestsIsMutable();
          failedRequests_.set(index, builderForValue.build());
          onChanged();
        } else {
          failedRequestsBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 3;</code>
       */
      public Builder addFailedRequests(org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto value) {
        if (failedRequestsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureFailedRequestsIsMutable();
          failedRequests_.add(value);
          onChanged();
        } else {
          failedRequestsBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 3;</code>
       */
      public Builder addFailedRequests(
          int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto value) {
        if (failedRequestsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureFailedRequestsIsMutable();
          failedRequests_.add(index, value);
          onChanged();
        } else {
          failedRequestsBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 3;</code>
       */
      public Builder addFailedRequests(
          org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder builderForValue) {
        if (failedRequestsBuilder_ == null) {
          ensureFailedRequestsIsMutable();
          failedRequests_.add(builderForValue.build());
          onChanged();
        } else {
          failedRequestsBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 3;</code>
       */
      public Builder addFailedRequests(
          int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder builderForValue) {
        if (failedRequestsBuilder_ == null) {
          ensureFailedRequestsIsMutable();
          failedRequests_.add(index, builderForValue.build());
          onChanged();
        } else {
          failedRequestsBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 3;</code>
       */
      public Builder addAllFailedRequests(
          java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto> values) {
        if (failedRequestsBuilder_ == null) {
          ensureFailedRequestsIsMutable();
          org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
              values, failedRequests_);
          onChanged();
        } else {
          failedRequestsBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 3;</code>
       */
      public Builder clearFailedRequests() {
        if (failedRequestsBuilder_ == null) {
          failedRequests_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000004);
          onChanged();
        } else {
          failedRequestsBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 3;</code>
       */
      public Builder removeFailedRequests(int index) {
        if (failedRequestsBuilder_ == null) {
          ensureFailedRequestsIsMutable();
          failedRequests_.remove(index);
          onChanged();
        } else {
          failedRequestsBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 3;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder getFailedRequestsBuilder(
          int index) {
        return getFailedRequestsFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 3;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProtoOrBuilder getFailedRequestsOrBuilder(
          int index) {
        if (failedRequestsBuilder_ == null) {
          return failedRequests_.get(index);
        } else {
          return failedRequestsBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 3;</code>
       */
      public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProtoOrBuilder> 
           getFailedRequestsOrBuilderList() {
        if (failedRequestsBuilder_ != null) {
          return failedRequestsBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(failedRequests_);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 3;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder addFailedRequestsBuilder() {
        return getFailedRequestsFieldBuilder().addBuilder(
            org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 3;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder addFailedRequestsBuilder(
          int index) {
        return getFailedRequestsFieldBuilder().addBuilder(
            index, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 3;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder> 
           getFailedRequestsBuilderList() {
        return getFailedRequestsFieldBuilder().getBuilderList();
      }
      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProtoOrBuilder> 
          getFailedRequestsFieldBuilder() {
        if (failedRequestsBuilder_ == null) {
          failedRequestsBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProtoOrBuilder>(
                  failedRequests_,
                  ((bitField0_ & 0x00000004) != 0),
                  getParentForChildren(),
                  isClean());
          failedRequests_ = null;
        }
        return failedRequestsBuilder_;
      }
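
      // Read-path note: getFailedRequestsOrBuilderList() exposes read-only
      // views without forcing every element into a built message, so it is
      // the cheap way to scan the field. A sketch over an existing message
      // named response (the getContainerId() accessor is assumed from
      // ContainerExceptionMapProto's layout):
      //
      //   for (org.apache.hadoop.yarn.proto.YarnServiceProtos
      //            .ContainerExceptionMapProtoOrBuilder f :
      //        response.getFailedRequestsOrBuilderList()) {
      //     org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto id =
      //         f.getContainerId();
      //   }
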
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.StartContainersResponseProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.StartContainersResponseProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersResponseProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersResponseProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersResponseProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<StartContainersResponseProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<StartContainersResponseProto>() {
      @java.lang.Override
      public StartContainersResponseProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<StartContainersResponseProto> parser() {
      return PARSER;
    }
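
    // Failure-handling note: parsePartialFrom() above converts every decode
    // failure into InvalidProtocolBufferException and attaches the partially
    // built message via setUnfinishedMessage(). A recovery sketch over an
    // arbitrary byte[] named bytes (a stand-in, not from this file):
    //
    //   try {
    //     StartContainersResponseProto msg =
    //         StartContainersResponseProto.parser().parseFrom(bytes);
    //   } catch (org.apache.hadoop.thirdparty.protobuf
    //       .InvalidProtocolBufferException e) {
    //     org.apache.hadoop.thirdparty.protobuf.MessageLite partial =
    //         e.getUnfinishedMessage();  // may be null
    //   }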

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<StartContainersResponseProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersResponseProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }

  public interface StopContainersRequestProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.StopContainersRequestProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>repeated .hadoop.yarn.ContainerIdProto container_id = 1;</code>
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto> 
        getContainerIdList();
    /**
     * <code>repeated .hadoop.yarn.ContainerIdProto container_id = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getContainerId(int index);
    /**
     * <code>repeated .hadoop.yarn.ContainerIdProto container_id = 1;</code>
     */
    int getContainerIdCount();
    /**
     * <code>repeated .hadoop.yarn.ContainerIdProto container_id = 1;</code>
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> 
        getContainerIdOrBuilderList();
    /**
     * <code>repeated .hadoop.yarn.ContainerIdProto container_id = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getContainerIdOrBuilder(
        int index);
  }
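
  // The *OrBuilder interface is the read-only contract shared by the message
  // class and its Builder, so utility code can accept either form without
  // forcing an immutable snapshot. A sketch:
  //
  //   static int countIds(
  //       org.apache.hadoop.yarn.proto.YarnServiceProtos
  //           .StopContainersRequestProtoOrBuilder req) {
  //     return req.getContainerIdCount();
  //   }
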
  /**
   * Protobuf type {@code hadoop.yarn.StopContainersRequestProto}
   */
  public static final class StopContainersRequestProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.StopContainersRequestProto)
      StopContainersRequestProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use StopContainersRequestProto.newBuilder() to construct.
    private StopContainersRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private StopContainersRequestProto() {
      containerId_ = java.util.Collections.emptyList();
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new StopContainersRequestProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_StopContainersRequestProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_StopContainersRequestProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersRequestProto.Builder.class);
    }

    public static final int CONTAINER_ID_FIELD_NUMBER = 1;
    @SuppressWarnings("serial")
    private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto> containerId_;
    /**
     * <code>repeated .hadoop.yarn.ContainerIdProto container_id = 1;</code>
     */
    @java.lang.Override
    public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto> getContainerIdList() {
      return containerId_;
    }
    /**
     * <code>repeated .hadoop.yarn.ContainerIdProto container_id = 1;</code>
     */
    @java.lang.Override
    public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> 
        getContainerIdOrBuilderList() {
      return containerId_;
    }
    /**
     * <code>repeated .hadoop.yarn.ContainerIdProto container_id = 1;</code>
     */
    @java.lang.Override
    public int getContainerIdCount() {
      return containerId_.size();
    }
    /**
     * <code>repeated .hadoop.yarn.ContainerIdProto container_id = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getContainerId(int index) {
      return containerId_.get(index);
    }
    /**
     * <code>repeated .hadoop.yarn.ContainerIdProto container_id = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getContainerIdOrBuilder(
        int index) {
      return containerId_.get(index);
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      for (int i = 0; i < containerId_.size(); i++) {
        output.writeMessage(1, containerId_.get(i));
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      for (int i = 0; i < containerId_.size(); i++) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, containerId_.get(i));
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }
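
    // Size memoization: the result is cached in memoizedSize (-1 means "not
    // yet computed"), which is safe because a built message is immutable.
    // toByteArray() sizes its buffer from this value; a round-trip sketch,
    // where request is a stand-in message:
    //
    //   byte[] wire = request.toByteArray();
    //   StopContainersRequestProto back =
    //       StopContainersRequestProto.parseFrom(wire);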

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersRequestProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersRequestProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersRequestProto) obj;

      if (!getContainerIdList()
          .equals(other.getContainerIdList())) return false;
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (getContainerIdCount() > 0) {
        hash = (37 * hash) + CONTAINER_ID_FIELD_NUMBER;
        hash = (53 * hash) + getContainerIdList().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
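
    // hashCode() is memoized the same way (0 means "not yet computed") and
    // mixes each present field under its field number, so two messages that
    // compare equal via equals() always hash equally.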

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersRequestProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersRequestProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersRequestProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersRequestProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersRequestProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersRequestProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersRequestProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersRequestProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }
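
    // Framing note: the parseFrom(...) overloads expect exactly one message
    // per input, while the parseDelimitedFrom(...) pair reads a varint length
    // prefix first, so several messages can share one stream. A sketch, given
    // stand-in streams out and in:
    //
    //   request.writeDelimitedTo(out);   // varint length prefix + payload
    //   StopContainersRequestProto next =
    //       StopContainersRequestProto.parseDelimitedFrom(in);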

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersRequestProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }
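
    // toBuilder() special-cases DEFAULT_INSTANCE: merging from the canonical
    // empty message would be a no-op, so a fresh Builder is returned instead
    // of paying for mergeFrom(this).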

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.StopContainersRequestProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.StopContainersRequestProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersRequestProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_StopContainersRequestProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_StopContainersRequestProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersRequestProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersRequestProto.newBuilder()
      private Builder() {
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        if (containerIdBuilder_ == null) {
          containerId_ = java.util.Collections.emptyList();
        } else {
          containerId_ = null;
          containerIdBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_StopContainersRequestProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersRequestProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersRequestProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersRequestProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersRequestProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersRequestProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersRequestProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersRequestProto(this);
        buildPartialRepeatedFields(result);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartialRepeatedFields(org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersRequestProto result) {
        if (containerIdBuilder_ == null) {
          if (((bitField0_ & 0x00000001) != 0)) {
            containerId_ = java.util.Collections.unmodifiableList(containerId_);
            bitField0_ = (bitField0_ & ~0x00000001);
          }
          result.containerId_ = containerId_;
        } else {
          result.containerId_ = containerIdBuilder_.build();
        }
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersRequestProto result) {
        int from_bitField0_ = bitField0_;
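        // Nothing to transfer here: container_id is repeated and is handled
        // in buildPartialRepeatedFields(); the unused local mirrors the shape
        // protoc emits for messages that do carry singular fields.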
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersRequestProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersRequestProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersRequestProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersRequestProto.getDefaultInstance()) return this;
        if (containerIdBuilder_ == null) {
          if (!other.containerId_.isEmpty()) {
            if (containerId_.isEmpty()) {
              containerId_ = other.containerId_;
              bitField0_ = (bitField0_ & ~0x00000001);
            } else {
              ensureContainerIdIsMutable();
              containerId_.addAll(other.containerId_);
            }
            onChanged();
          }
        } else {
          if (!other.containerId_.isEmpty()) {
            if (containerIdBuilder_.isEmpty()) {
              containerIdBuilder_.dispose();
              containerIdBuilder_ = null;
              containerId_ = other.containerId_;
              bitField0_ = (bitField0_ & ~0x00000001);
              containerIdBuilder_ = 
                org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                   getContainerIdFieldBuilder() : null;
            } else {
              containerIdBuilder_.addAllMessages(other.containerId_);
            }
          }
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }
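
      // Merge semantics: when this builder's list is empty the other
      // message's (immutable) list is aliased and the mutable bit cleared,
      // deferring any copy until the next write; otherwise the entries are
      // appended after ensureContainerIdIsMutable(). For any two messages a
      // and b (stand-ins):
      //
      //   StopContainersRequestProto merged =
      //       a.toBuilder().mergeFrom(b).build();
      //   // merged.getContainerIdCount()
      //   //     == a.getContainerIdCount() + b.getContainerIdCount()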

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto m =
                    input.readMessage(
                        org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.PARSER,
                        extensionRegistry);
                if (containerIdBuilder_ == null) {
                  ensureContainerIdIsMutable();
                  containerId_.add(m);
                } else {
                  containerIdBuilder_.addMessage(m);
                }
                break;
              } // case 10
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

      private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto> containerId_ =
        java.util.Collections.emptyList();
      private void ensureContainerIdIsMutable() {
        if (!((bitField0_ & 0x00000001) != 0)) {
          containerId_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto>(containerId_);
          bitField0_ |= 0x00000001;
        }
      }

      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> containerIdBuilder_;

      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto> getContainerIdList() {
        if (containerIdBuilder_ == null) {
          return java.util.Collections.unmodifiableList(containerId_);
        } else {
          return containerIdBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public int getContainerIdCount() {
        if (containerIdBuilder_ == null) {
          return containerId_.size();
        } else {
          return containerIdBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getContainerId(int index) {
        if (containerIdBuilder_ == null) {
          return containerId_.get(index);
        } else {
          return containerIdBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public Builder setContainerId(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) {
        if (containerIdBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureContainerIdIsMutable();
          containerId_.set(index, value);
          onChanged();
        } else {
          containerIdBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public Builder setContainerId(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder builderForValue) {
        if (containerIdBuilder_ == null) {
          ensureContainerIdIsMutable();
          containerId_.set(index, builderForValue.build());
          onChanged();
        } else {
          containerIdBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public Builder addContainerId(org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) {
        if (containerIdBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureContainerIdIsMutable();
          containerId_.add(value);
          onChanged();
        } else {
          containerIdBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public Builder addContainerId(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) {
        if (containerIdBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureContainerIdIsMutable();
          containerId_.add(index, value);
          onChanged();
        } else {
          containerIdBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public Builder addContainerId(
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder builderForValue) {
        if (containerIdBuilder_ == null) {
          ensureContainerIdIsMutable();
          containerId_.add(builderForValue.build());
          onChanged();
        } else {
          containerIdBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public Builder addContainerId(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder builderForValue) {
        if (containerIdBuilder_ == null) {
          ensureContainerIdIsMutable();
          containerId_.add(index, builderForValue.build());
          onChanged();
        } else {
          containerIdBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public Builder addAllContainerId(
          java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto> values) {
        if (containerIdBuilder_ == null) {
          ensureContainerIdIsMutable();
          org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
              values, containerId_);
          onChanged();
        } else {
          containerIdBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public Builder clearContainerId() {
        if (containerIdBuilder_ == null) {
          containerId_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000001);
          onChanged();
        } else {
          containerIdBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public Builder removeContainerId(int index) {
        if (containerIdBuilder_ == null) {
          ensureContainerIdIsMutable();
          containerId_.remove(index);
          onChanged();
        } else {
          containerIdBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder getContainerIdBuilder(
          int index) {
        return getContainerIdFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getContainerIdOrBuilder(
          int index) {
        if (containerIdBuilder_ == null) {
          return containerId_.get(index);
        } else {
          return containerIdBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> 
           getContainerIdOrBuilderList() {
        if (containerIdBuilder_ != null) {
          return containerIdBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(containerId_);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder addContainerIdBuilder() {
        return getContainerIdFieldBuilder().addBuilder(
            org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder addContainerIdBuilder(
          int index) {
        return getContainerIdFieldBuilder().addBuilder(
            index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder> 
           getContainerIdBuilderList() {
        return getContainerIdFieldBuilder().getBuilderList();
      }
      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> 
          getContainerIdFieldBuilder() {
        if (containerIdBuilder_ == null) {
          containerIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder>(
                  containerId_,
                  ((bitField0_ & 0x00000001) != 0),
                  getParentForChildren(),
                  isClean());
          containerId_ = null;
        }
        return containerIdBuilder_;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.StopContainersRequestProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.StopContainersRequestProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersRequestProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersRequestProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersRequestProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<StopContainersRequestProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<StopContainersRequestProto>() {
      @java.lang.Override
      public StopContainersRequestProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<StopContainersRequestProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<StopContainersRequestProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersRequestProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
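
  /*
   * Usage sketch (editorial addition; this file is otherwise generated and
   * must not be hand-edited): building a StopContainersRequestProto,
   * serializing it, and parsing it back. The ContainerIdProto value is
   * assumed to come from an existing container handle; the snippet is kept
   * inside a comment so the generated source still compiles unchanged.
   *
   *   org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto cid = ...; // from the running container
   *   StopContainersRequestProto request = StopContainersRequestProto.newBuilder()
   *       .addContainerId(cid)
   *       .build();
   *   byte[] wire = request.toByteArray();
   *   StopContainersRequestProto parsed = StopContainersRequestProto.parseFrom(wire);
   */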

  public interface StopContainersResponseProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.StopContainersResponseProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1;</code>
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto> 
        getSucceededRequestsList();
    /**
     * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getSucceededRequests(int index);
    /**
     * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1;</code>
     */
    int getSucceededRequestsCount();
    /**
     * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1;</code>
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> 
        getSucceededRequestsOrBuilderList();
    /**
     * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getSucceededRequestsOrBuilder(
        int index);

    /**
     * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto> 
        getFailedRequestsList();
    /**
     * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
     */
    org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto getFailedRequests(int index);
    /**
     * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
     */
    int getFailedRequestsCount();
    /**
     * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProtoOrBuilder> 
        getFailedRequestsOrBuilderList();
    /**
     * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
     */
    org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProtoOrBuilder getFailedRequestsOrBuilder(
        int index);
  }
  /**
   * Protobuf type {@code hadoop.yarn.StopContainersResponseProto}
   */
  public static final class StopContainersResponseProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.StopContainersResponseProto)
      StopContainersResponseProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use StopContainersResponseProto.newBuilder() to construct.
    private StopContainersResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private StopContainersResponseProto() {
      succeededRequests_ = java.util.Collections.emptyList();
      failedRequests_ = java.util.Collections.emptyList();
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new StopContainersResponseProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_StopContainersResponseProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_StopContainersResponseProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersResponseProto.Builder.class);
    }

    public static final int SUCCEEDED_REQUESTS_FIELD_NUMBER = 1;
    @SuppressWarnings("serial")
    private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto> succeededRequests_;
    /**
     * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1;</code>
     */
    @java.lang.Override
    public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto> getSucceededRequestsList() {
      return succeededRequests_;
    }
    /**
     * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1;</code>
     */
    @java.lang.Override
    public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> 
        getSucceededRequestsOrBuilderList() {
      return succeededRequests_;
    }
    /**
     * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1;</code>
     */
    @java.lang.Override
    public int getSucceededRequestsCount() {
      return succeededRequests_.size();
    }
    /**
     * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getSucceededRequests(int index) {
      return succeededRequests_.get(index);
    }
    /**
     * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getSucceededRequestsOrBuilder(
        int index) {
      return succeededRequests_.get(index);
    }

    public static final int FAILED_REQUESTS_FIELD_NUMBER = 2;
    @SuppressWarnings("serial")
    private java.util.List<org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto> failedRequests_;
    /**
     * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
     */
    @java.lang.Override
    public java.util.List<org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto> getFailedRequestsList() {
      return failedRequests_;
    }
    /**
     * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
     */
    @java.lang.Override
    public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProtoOrBuilder> 
        getFailedRequestsOrBuilderList() {
      return failedRequests_;
    }
    /**
     * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
     */
    @java.lang.Override
    public int getFailedRequestsCount() {
      return failedRequests_.size();
    }
    /**
     * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto getFailedRequests(int index) {
      return failedRequests_.get(index);
    }
    /**
     * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProtoOrBuilder getFailedRequestsOrBuilder(
        int index) {
      return failedRequests_.get(index);
    }

    private byte memoizedIsInitialized = -1;
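    // Tri-state initialization cache: -1 = not yet computed, 0 = known
    // uninitialized, 1 = known initialized. This message has no required
    // fields, so isInitialized() always memoizes 1.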
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      for (int i = 0; i < succeededRequests_.size(); i++) {
        output.writeMessage(1, succeededRequests_.get(i));
      }
      for (int i = 0; i < failedRequests_.size(); i++) {
        output.writeMessage(2, failedRequests_.get(i));
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      for (int i = 0; i < succeededRequests_.size(); i++) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, succeededRequests_.get(i));
      }
      for (int i = 0; i < failedRequests_.size(); i++) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(2, failedRequests_.get(i));
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersResponseProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersResponseProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersResponseProto) obj;

      if (!getSucceededRequestsList()
          .equals(other.getSucceededRequestsList())) return false;
      if (!getFailedRequestsList()
          .equals(other.getFailedRequestsList())) return false;
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (getSucceededRequestsCount() > 0) {
        hash = (37 * hash) + SUCCEEDED_REQUESTS_FIELD_NUMBER;
        hash = (53 * hash) + getSucceededRequestsList().hashCode();
      }
      if (getFailedRequestsCount() > 0) {
        hash = (37 * hash) + FAILED_REQUESTS_FIELD_NUMBER;
        hash = (53 * hash) + getFailedRequestsList().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersResponseProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersResponseProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersResponseProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersResponseProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersResponseProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersResponseProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersResponseProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersResponseProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersResponseProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.StopContainersResponseProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.StopContainersResponseProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersResponseProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_StopContainersResponseProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_StopContainersResponseProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersResponseProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersResponseProto.newBuilder()
      private Builder() {
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        if (succeededRequestsBuilder_ == null) {
          succeededRequests_ = java.util.Collections.emptyList();
        } else {
          succeededRequests_ = null;
          succeededRequestsBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        if (failedRequestsBuilder_ == null) {
          failedRequests_ = java.util.Collections.emptyList();
        } else {
          failedRequests_ = null;
          failedRequestsBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000002);
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_StopContainersResponseProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersResponseProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersResponseProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersResponseProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersResponseProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersResponseProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersResponseProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersResponseProto(this);
        buildPartialRepeatedFields(result);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartialRepeatedFields(org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersResponseProto result) {
        if (succeededRequestsBuilder_ == null) {
          if (((bitField0_ & 0x00000001) != 0)) {
            succeededRequests_ = java.util.Collections.unmodifiableList(succeededRequests_);
            bitField0_ = (bitField0_ & ~0x00000001);
          }
          result.succeededRequests_ = succeededRequests_;
        } else {
          result.succeededRequests_ = succeededRequestsBuilder_.build();
        }
        if (failedRequestsBuilder_ == null) {
          if (((bitField0_ & 0x00000002) != 0)) {
            failedRequests_ = java.util.Collections.unmodifiableList(failedRequests_);
            bitField0_ = (bitField0_ & ~0x00000002);
          }
          result.failedRequests_ = failedRequests_;
        } else {
          result.failedRequests_ = failedRequestsBuilder_.build();
        }
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersResponseProto result) {
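        // Generated no-op: this message has only repeated fields (handled in
        // buildPartialRepeatedFields), so there are no singular fields to copy
        // here and the local below goes unused.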
        int from_bitField0_ = bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersResponseProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersResponseProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersResponseProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersResponseProto.getDefaultInstance()) return this;
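        // Optimization: if this builder holds no elements yet, share the other
        // message's immutable list directly (clearing the mutable bit) rather
        // than copying element by element; otherwise append via addAll.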
        if (succeededRequestsBuilder_ == null) {
          if (!other.succeededRequests_.isEmpty()) {
            if (succeededRequests_.isEmpty()) {
              succeededRequests_ = other.succeededRequests_;
              bitField0_ = (bitField0_ & ~0x00000001);
            } else {
              ensureSucceededRequestsIsMutable();
              succeededRequests_.addAll(other.succeededRequests_);
            }
            onChanged();
          }
        } else {
          if (!other.succeededRequests_.isEmpty()) {
            if (succeededRequestsBuilder_.isEmpty()) {
              succeededRequestsBuilder_.dispose();
              succeededRequestsBuilder_ = null;
              succeededRequests_ = other.succeededRequests_;
              bitField0_ = (bitField0_ & ~0x00000001);
              succeededRequestsBuilder_ = 
                org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                   getSucceededRequestsFieldBuilder() : null;
            } else {
              succeededRequestsBuilder_.addAllMessages(other.succeededRequests_);
            }
          }
        }
        if (failedRequestsBuilder_ == null) {
          if (!other.failedRequests_.isEmpty()) {
            if (failedRequests_.isEmpty()) {
              failedRequests_ = other.failedRequests_;
              bitField0_ = (bitField0_ & ~0x00000002);
            } else {
              ensureFailedRequestsIsMutable();
              failedRequests_.addAll(other.failedRequests_);
            }
            onChanged();
          }
        } else {
          if (!other.failedRequests_.isEmpty()) {
            if (failedRequestsBuilder_.isEmpty()) {
              failedRequestsBuilder_.dispose();
              failedRequestsBuilder_ = null;
              failedRequests_ = other.failedRequests_;
              bitField0_ = (bitField0_ & ~0x00000002);
              failedRequestsBuilder_ = 
                org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                   getFailedRequestsFieldBuilder() : null;
            } else {
              failedRequestsBuilder_.addAllMessages(other.failedRequests_);
            }
          }
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
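            // Protobuf wire format: tag = (field_number << 3) | wire_type.
            // So case 10 (0x0A) is field 1 (succeeded_requests) and case 18
            // (0x12) is field 2 (failed_requests), both length-delimited
            // (wire type 2).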
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto m =
                    input.readMessage(
                        org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.PARSER,
                        extensionRegistry);
                if (succeededRequestsBuilder_ == null) {
                  ensureSucceededRequestsIsMutable();
                  succeededRequests_.add(m);
                } else {
                  succeededRequestsBuilder_.addMessage(m);
                }
                break;
              } // case 10
              case 18: {
                org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto m =
                    input.readMessage(
                        org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.PARSER,
                        extensionRegistry);
                if (failedRequestsBuilder_ == null) {
                  ensureFailedRequestsIsMutable();
                  failedRequests_.add(m);
                } else {
                  failedRequestsBuilder_.addMessage(m);
                }
                break;
              } // case 18
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

      private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto> succeededRequests_ =
        java.util.Collections.emptyList();
      private void ensureSucceededRequestsIsMutable() {
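        // Copy-on-write guard: the list starts as an immutable (possibly
        // shared) instance; take a private mutable copy on first mutation and
        // record mutability in bitField0_.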
        if (!((bitField0_ & 0x00000001) != 0)) {
          succeededRequests_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto>(succeededRequests_);
          bitField0_ |= 0x00000001;
         }
      }

      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> succeededRequestsBuilder_;

      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto> getSucceededRequestsList() {
        if (succeededRequestsBuilder_ == null) {
          return java.util.Collections.unmodifiableList(succeededRequests_);
        } else {
          return succeededRequestsBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1;</code>
       */
      public int getSucceededRequestsCount() {
        if (succeededRequestsBuilder_ == null) {
          return succeededRequests_.size();
        } else {
          return succeededRequestsBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getSucceededRequests(int index) {
        if (succeededRequestsBuilder_ == null) {
          return succeededRequests_.get(index);
        } else {
          return succeededRequestsBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1;</code>
       */
      public Builder setSucceededRequests(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) {
        if (succeededRequestsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureSucceededRequestsIsMutable();
          succeededRequests_.set(index, value);
          onChanged();
        } else {
          succeededRequestsBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1;</code>
       */
      public Builder setSucceededRequests(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder builderForValue) {
        if (succeededRequestsBuilder_ == null) {
          ensureSucceededRequestsIsMutable();
          succeededRequests_.set(index, builderForValue.build());
          onChanged();
        } else {
          succeededRequestsBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1;</code>
       */
      public Builder addSucceededRequests(org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) {
        if (succeededRequestsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureSucceededRequestsIsMutable();
          succeededRequests_.add(value);
          onChanged();
        } else {
          succeededRequestsBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1;</code>
       */
      public Builder addSucceededRequests(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) {
        if (succeededRequestsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureSucceededRequestsIsMutable();
          succeededRequests_.add(index, value);
          onChanged();
        } else {
          succeededRequestsBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1;</code>
       */
      public Builder addSucceededRequests(
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder builderForValue) {
        if (succeededRequestsBuilder_ == null) {
          ensureSucceededRequestsIsMutable();
          succeededRequests_.add(builderForValue.build());
          onChanged();
        } else {
          succeededRequestsBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1;</code>
       */
      public Builder addSucceededRequests(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder builderForValue) {
        if (succeededRequestsBuilder_ == null) {
          ensureSucceededRequestsIsMutable();
          succeededRequests_.add(index, builderForValue.build());
          onChanged();
        } else {
          succeededRequestsBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1;</code>
       */
      public Builder addAllSucceededRequests(
          java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto> values) {
        if (succeededRequestsBuilder_ == null) {
          ensureSucceededRequestsIsMutable();
          org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
              values, succeededRequests_);
          onChanged();
        } else {
          succeededRequestsBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1;</code>
       */
      public Builder clearSucceededRequests() {
        if (succeededRequestsBuilder_ == null) {
          succeededRequests_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000001);
          onChanged();
        } else {
          succeededRequestsBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1;</code>
       */
      public Builder removeSucceededRequests(int index) {
        if (succeededRequestsBuilder_ == null) {
          ensureSucceededRequestsIsMutable();
          succeededRequests_.remove(index);
          onChanged();
        } else {
          succeededRequestsBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder getSucceededRequestsBuilder(
          int index) {
        return getSucceededRequestsFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getSucceededRequestsOrBuilder(
          int index) {
        if (succeededRequestsBuilder_ == null) {
          return succeededRequests_.get(index);
        } else {
          return succeededRequestsBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1;</code>
       */
      public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> 
           getSucceededRequestsOrBuilderList() {
        if (succeededRequestsBuilder_ != null) {
          return succeededRequestsBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(succeededRequests_);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder addSucceededRequestsBuilder() {
        return getSucceededRequestsFieldBuilder().addBuilder(
            org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder addSucceededRequestsBuilder(
          int index) {
        return getSucceededRequestsFieldBuilder().addBuilder(
            index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder> 
           getSucceededRequestsBuilderList() {
        return getSucceededRequestsFieldBuilder().getBuilderList();
      }
      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> 
          getSucceededRequestsFieldBuilder() {
        if (succeededRequestsBuilder_ == null) {
          succeededRequestsBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder>(
                  succeededRequests_,
                  ((bitField0_ & 0x00000001) != 0),
                  getParentForChildren(),
                  isClean());
          succeededRequests_ = null;
        }
        return succeededRequestsBuilder_;
      }

      private java.util.List<org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto> failedRequests_ =
        java.util.Collections.emptyList();
      private void ensureFailedRequestsIsMutable() {
        if (!((bitField0_ & 0x00000002) != 0)) {
          failedRequests_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto>(failedRequests_);
          bitField0_ |= 0x00000002;
         }
      }

      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProtoOrBuilder> failedRequestsBuilder_;

      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto> getFailedRequestsList() {
        if (failedRequestsBuilder_ == null) {
          return java.util.Collections.unmodifiableList(failedRequests_);
        } else {
          return failedRequestsBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
       */
      public int getFailedRequestsCount() {
        if (failedRequestsBuilder_ == null) {
          return failedRequests_.size();
        } else {
          return failedRequestsBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto getFailedRequests(int index) {
        if (failedRequestsBuilder_ == null) {
          return failedRequests_.get(index);
        } else {
          return failedRequestsBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
       */
      public Builder setFailedRequests(
          int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto value) {
        if (failedRequestsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureFailedRequestsIsMutable();
          failedRequests_.set(index, value);
          onChanged();
        } else {
          failedRequestsBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
       */
      public Builder setFailedRequests(
          int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder builderForValue) {
        if (failedRequestsBuilder_ == null) {
          ensureFailedRequestsIsMutable();
          failedRequests_.set(index, builderForValue.build());
          onChanged();
        } else {
          failedRequestsBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
       */
      public Builder addFailedRequests(org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto value) {
        if (failedRequestsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureFailedRequestsIsMutable();
          failedRequests_.add(value);
          onChanged();
        } else {
          failedRequestsBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
       */
      public Builder addFailedRequests(
          int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto value) {
        if (failedRequestsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureFailedRequestsIsMutable();
          failedRequests_.add(index, value);
          onChanged();
        } else {
          failedRequestsBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
       */
      public Builder addFailedRequests(
          org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder builderForValue) {
        if (failedRequestsBuilder_ == null) {
          ensureFailedRequestsIsMutable();
          failedRequests_.add(builderForValue.build());
          onChanged();
        } else {
          failedRequestsBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
       */
      public Builder addFailedRequests(
          int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder builderForValue) {
        if (failedRequestsBuilder_ == null) {
          ensureFailedRequestsIsMutable();
          failedRequests_.add(index, builderForValue.build());
          onChanged();
        } else {
          failedRequestsBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
       */
      public Builder addAllFailedRequests(
          java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto> values) {
        if (failedRequestsBuilder_ == null) {
          ensureFailedRequestsIsMutable();
          org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
              values, failedRequests_);
          onChanged();
        } else {
          failedRequestsBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
       */
      public Builder clearFailedRequests() {
        if (failedRequestsBuilder_ == null) {
          failedRequests_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000002);
          onChanged();
        } else {
          failedRequestsBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
       */
      public Builder removeFailedRequests(int index) {
        if (failedRequestsBuilder_ == null) {
          ensureFailedRequestsIsMutable();
          failedRequests_.remove(index);
          onChanged();
        } else {
          failedRequestsBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder getFailedRequestsBuilder(
          int index) {
        return getFailedRequestsFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProtoOrBuilder getFailedRequestsOrBuilder(
          int index) {
        if (failedRequestsBuilder_ == null) {
          return failedRequests_.get(index);
        } else {
          return failedRequestsBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
       */
      public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProtoOrBuilder> 
           getFailedRequestsOrBuilderList() {
        if (failedRequestsBuilder_ != null) {
          return failedRequestsBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(failedRequests_);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder addFailedRequestsBuilder() {
        return getFailedRequestsFieldBuilder().addBuilder(
            org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder addFailedRequestsBuilder(
          int index) {
        return getFailedRequestsFieldBuilder().addBuilder(
            index, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder> 
           getFailedRequestsBuilderList() {
        return getFailedRequestsFieldBuilder().getBuilderList();
      }
      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProtoOrBuilder> 
          getFailedRequestsFieldBuilder() {
        if (failedRequestsBuilder_ == null) {
          failedRequestsBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProtoOrBuilder>(
                  failedRequests_,
                  ((bitField0_ & 0x00000002) != 0),
                  getParentForChildren(),
                  isClean());
          failedRequests_ = null;
        }
        return failedRequestsBuilder_;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.StopContainersResponseProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.StopContainersResponseProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersResponseProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersResponseProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersResponseProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<StopContainersResponseProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<StopContainersResponseProto>() {
      @java.lang.Override
      public StopContainersResponseProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<StopContainersResponseProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<StopContainersResponseProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersResponseProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
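  // All messages in this file share the pattern above: a single lazily created
  // DEFAULT_INSTANCE plus a PARSER whose parsePartialFrom delegates to
  // Builder.mergeFrom. A minimal caller-side sketch, assuming `bytes` holds a
  // serialized StopContainersResponseProto (names here are illustrative, not
  // part of the generated API):
  //
  //   StopContainersResponseProto resp =
  //       StopContainersResponseProto.parseFrom(bytes);  // throws InvalidProtocolBufferException on malformed input
  //   for (ContainerExceptionMapProto failure : resp.getFailedRequestsList()) {
  //     // inspect the containers the NodeManager could not stop
  //   }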

  public interface GetContainerStatusesRequestProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.GetContainerStatusesRequestProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>repeated .hadoop.yarn.ContainerIdProto container_id = 1;</code>
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto> 
        getContainerIdList();
    /**
     * <code>repeated .hadoop.yarn.ContainerIdProto container_id = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getContainerId(int index);
    /**
     * <code>repeated .hadoop.yarn.ContainerIdProto container_id = 1;</code>
     */
    int getContainerIdCount();
    /**
     * <code>repeated .hadoop.yarn.ContainerIdProto container_id = 1;</code>
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> 
        getContainerIdOrBuilderList();
    /**
     * <code>repeated .hadoop.yarn.ContainerIdProto container_id = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getContainerIdOrBuilder(
        int index);
  }
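  // For a repeated message field, protoc emits five read accessors: the full
  // list, an indexed getter, a count, and the two *OrBuilder variants, which
  // expose elements without forcing full message construction on the builder side.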
  /**
   * Protobuf type {@code hadoop.yarn.GetContainerStatusesRequestProto}
   */
  public static final class GetContainerStatusesRequestProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.GetContainerStatusesRequestProto)
      GetContainerStatusesRequestProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use GetContainerStatusesRequestProto.newBuilder() to construct.
    private GetContainerStatusesRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private GetContainerStatusesRequestProto() {
      containerId_ = java.util.Collections.emptyList();
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new GetContainerStatusesRequestProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetContainerStatusesRequestProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetContainerStatusesRequestProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesRequestProto.Builder.class);
    }

    public static final int CONTAINER_ID_FIELD_NUMBER = 1;
    @SuppressWarnings("serial")
    private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto> containerId_;
    /**
     * <code>repeated .hadoop.yarn.ContainerIdProto container_id = 1;</code>
     */
    @java.lang.Override
    public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto> getContainerIdList() {
      return containerId_;
    }
    /**
     * <code>repeated .hadoop.yarn.ContainerIdProto container_id = 1;</code>
     */
    @java.lang.Override
    public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> 
        getContainerIdOrBuilderList() {
      return containerId_;
    }
    /**
     * <code>repeated .hadoop.yarn.ContainerIdProto container_id = 1;</code>
     */
    @java.lang.Override
    public int getContainerIdCount() {
      return containerId_.size();
    }
    /**
     * <code>repeated .hadoop.yarn.ContainerIdProto container_id = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getContainerId(int index) {
      return containerId_.get(index);
    }
    /**
     * <code>repeated .hadoop.yarn.ContainerIdProto container_id = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getContainerIdOrBuilder(
        int index) {
      return containerId_.get(index);
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      for (int i = 0; i < containerId_.size(); i++) {
        output.writeMessage(1, containerId_.get(i));
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      for (int i = 0; i < containerId_.size(); i++) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, containerId_.get(i));
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesRequestProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesRequestProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesRequestProto) obj;

      if (!getContainerIdList()
          .equals(other.getContainerIdList())) return false;
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (getContainerIdCount() > 0) {
        hash = (37 * hash) + CONTAINER_ID_FIELD_NUMBER;
        hash = (53 * hash) + getContainerIdList().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesRequestProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesRequestProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesRequestProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesRequestProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesRequestProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesRequestProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesRequestProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesRequestProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesRequestProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.GetContainerStatusesRequestProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.GetContainerStatusesRequestProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesRequestProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetContainerStatusesRequestProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetContainerStatusesRequestProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesRequestProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesRequestProto.newBuilder()
      private Builder() {

      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);

      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        if (containerIdBuilder_ == null) {
          containerId_ = java.util.Collections.emptyList();
        } else {
          containerId_ = null;
          containerIdBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetContainerStatusesRequestProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesRequestProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesRequestProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesRequestProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesRequestProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesRequestProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesRequestProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesRequestProto(this);
        buildPartialRepeatedFields(result);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartialRepeatedFields(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesRequestProto result) {
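        // Freeze-on-build: hand the builder's list to the message wrapped
        // unmodifiable and clear the ownership bit, so the next builder
        // mutation copies the list again rather than aliasing the built message.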
        if (containerIdBuilder_ == null) {
          if (((bitField0_ & 0x00000001) != 0)) {
            containerId_ = java.util.Collections.unmodifiableList(containerId_);
            bitField0_ = (bitField0_ & ~0x00000001);
          }
          result.containerId_ = containerId_;
        } else {
          result.containerId_ = containerIdBuilder_.build();
        }
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesRequestProto result) {
        // No singular fields in this message, so there is nothing to copy here;
        // the repeated container_id field is handled in buildPartialRepeatedFields.
        int from_bitField0_ = bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesRequestProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesRequestProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesRequestProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesRequestProto.getDefaultInstance()) return this;
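        // Two merge strategies: in plain-list mode, adopt or append directly;
        // in field-builder mode, an empty target adopts the other list wholesale
        // (dropping back to list mode), otherwise elements are appended one by one.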
        if (containerIdBuilder_ == null) {
          if (!other.containerId_.isEmpty()) {
            if (containerId_.isEmpty()) {
              containerId_ = other.containerId_;
              bitField0_ = (bitField0_ & ~0x00000001);
            } else {
              ensureContainerIdIsMutable();
              containerId_.addAll(other.containerId_);
            }
            onChanged();
          }
        } else {
          if (!other.containerId_.isEmpty()) {
            if (containerIdBuilder_.isEmpty()) {
              containerIdBuilder_.dispose();
              containerIdBuilder_ = null;
              containerId_ = other.containerId_;
              bitField0_ = (bitField0_ & ~0x00000001);
              containerIdBuilder_ = 
                org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                   getContainerIdFieldBuilder() : null;
            } else {
              containerIdBuilder_.addAllMessages(other.containerId_);
            }
          }
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
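              // tag 10 == (field 1 << 3) | wire type 2 (length-delimited): one container_id element.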
              case 10: {
                org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto m =
                    input.readMessage(
                        org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.PARSER,
                        extensionRegistry);
                if (containerIdBuilder_ == null) {
                  ensureContainerIdIsMutable();
                  containerId_.add(m);
                } else {
                  containerIdBuilder_.addMessage(m);
                }
                break;
              } // case 10
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

      private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto> containerId_ =
        java.util.Collections.emptyList();
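      // Copy-on-write guard: bit 0 of bitField0_ is set once containerId_ has
      // been copied into a private ArrayList that is safe to mutate in place.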
      private void ensureContainerIdIsMutable() {
        if (!((bitField0_ & 0x00000001) != 0)) {
          containerId_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto>(containerId_);
          bitField0_ |= 0x00000001;
        }
      }

      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> containerIdBuilder_;

      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto> getContainerIdList() {
        if (containerIdBuilder_ == null) {
          return java.util.Collections.unmodifiableList(containerId_);
        } else {
          return containerIdBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public int getContainerIdCount() {
        if (containerIdBuilder_ == null) {
          return containerId_.size();
        } else {
          return containerIdBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getContainerId(int index) {
        if (containerIdBuilder_ == null) {
          return containerId_.get(index);
        } else {
          return containerIdBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public Builder setContainerId(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) {
        if (containerIdBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureContainerIdIsMutable();
          containerId_.set(index, value);
          onChanged();
        } else {
          containerIdBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public Builder setContainerId(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder builderForValue) {
        if (containerIdBuilder_ == null) {
          ensureContainerIdIsMutable();
          containerId_.set(index, builderForValue.build());
          onChanged();
        } else {
          containerIdBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public Builder addContainerId(org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) {
        if (containerIdBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureContainerIdIsMutable();
          containerId_.add(value);
          onChanged();
        } else {
          containerIdBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public Builder addContainerId(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) {
        if (containerIdBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureContainerIdIsMutable();
          containerId_.add(index, value);
          onChanged();
        } else {
          containerIdBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public Builder addContainerId(
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder builderForValue) {
        if (containerIdBuilder_ == null) {
          ensureContainerIdIsMutable();
          containerId_.add(builderForValue.build());
          onChanged();
        } else {
          containerIdBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public Builder addContainerId(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder builderForValue) {
        if (containerIdBuilder_ == null) {
          ensureContainerIdIsMutable();
          containerId_.add(index, builderForValue.build());
          onChanged();
        } else {
          containerIdBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public Builder addAllContainerId(
          java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto> values) {
        if (containerIdBuilder_ == null) {
          ensureContainerIdIsMutable();
          org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
              values, containerId_);
          onChanged();
        } else {
          containerIdBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public Builder clearContainerId() {
        if (containerIdBuilder_ == null) {
          containerId_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000001);
          onChanged();
        } else {
          containerIdBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public Builder removeContainerId(int index) {
        if (containerIdBuilder_ == null) {
          ensureContainerIdIsMutable();
          containerId_.remove(index);
          onChanged();
        } else {
          containerIdBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder getContainerIdBuilder(
          int index) {
        return getContainerIdFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getContainerIdOrBuilder(
          int index) {
        if (containerIdBuilder_ == null) {
          return containerId_.get(index);
        } else {
          return containerIdBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> 
           getContainerIdOrBuilderList() {
        if (containerIdBuilder_ != null) {
          return containerIdBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(containerId_);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder addContainerIdBuilder() {
        return getContainerIdFieldBuilder().addBuilder(
            org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder addContainerIdBuilder(
          int index) {
        return getContainerIdFieldBuilder().addBuilder(
            index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder> 
           getContainerIdBuilderList() {
        return getContainerIdFieldBuilder().getBuilderList();
      }
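      // Lazily replaces the plain list with a RepeatedFieldBuilderV3 the first
      // time a nested Builder view is requested; afterwards the field builder
      // owns the elements and containerId_ is nulled out.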
      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> 
          getContainerIdFieldBuilder() {
        if (containerIdBuilder_ == null) {
          containerIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder>(
                  containerId_,
                  ((bitField0_ & 0x00000001) != 0),
                  getParentForChildren(),
                  isClean());
          containerId_ = null;
        }
        return containerIdBuilder_;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.GetContainerStatusesRequestProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.GetContainerStatusesRequestProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesRequestProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesRequestProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesRequestProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<GetContainerStatusesRequestProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<GetContainerStatusesRequestProto>() {
      @java.lang.Override
      public GetContainerStatusesRequestProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<GetContainerStatusesRequestProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<GetContainerStatusesRequestProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesRequestProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
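  // A minimal builder-side sketch for the message above (illustrative only;
  // assumes ContainerIdProto values `id1` and `id2` were built elsewhere):
  //
  //   GetContainerStatusesRequestProto request =
  //       GetContainerStatusesRequestProto.newBuilder()
  //           .addContainerId(id1)
  //           .addContainerId(id2)
  //           .build();
  //   // request.getContainerIdCount() == 2; the request can then be sent over
  //   // the ContainerManagementProtocol to fetch container statuses.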

  public interface GetContainerStatusesResponseProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.GetContainerStatusesResponseProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>repeated .hadoop.yarn.ContainerStatusProto status = 1;</code>
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto> 
        getStatusList();
    /**
     * <code>repeated .hadoop.yarn.ContainerStatusProto status = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto getStatus(int index);
    /**
     * <code>repeated .hadoop.yarn.ContainerStatusProto status = 1;</code>
     */
    int getStatusCount();
    /**
     * <code>repeated .hadoop.yarn.ContainerStatusProto status = 1;</code>
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProtoOrBuilder> 
        getStatusOrBuilderList();
    /**
     * <code>repeated .hadoop.yarn.ContainerStatusProto status = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProtoOrBuilder getStatusOrBuilder(
        int index);

    /**
     * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto> 
        getFailedRequestsList();
    /**
     * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
     */
    org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto getFailedRequests(int index);
    /**
     * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
     */
    int getFailedRequestsCount();
    /**
     * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProtoOrBuilder> 
        getFailedRequestsOrBuilderList();
    /**
     * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
     */
    org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProtoOrBuilder getFailedRequestsOrBuilder(
        int index);
  }
  /**
   * Protobuf type {@code hadoop.yarn.GetContainerStatusesResponseProto}
   */
  public static final class GetContainerStatusesResponseProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.GetContainerStatusesResponseProto)
      GetContainerStatusesResponseProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use GetContainerStatusesResponseProto.newBuilder() to construct.
    private GetContainerStatusesResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private GetContainerStatusesResponseProto() {
      status_ = java.util.Collections.emptyList();
      failedRequests_ = java.util.Collections.emptyList();
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new GetContainerStatusesResponseProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetContainerStatusesResponseProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetContainerStatusesResponseProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesResponseProto.Builder.class);
    }

    public static final int STATUS_FIELD_NUMBER = 1;
    @SuppressWarnings("serial")
    private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto> status_;
    /**
     * <code>repeated .hadoop.yarn.ContainerStatusProto status = 1;</code>
     */
    @java.lang.Override
    public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto> getStatusList() {
      return status_;
    }
    /**
     * <code>repeated .hadoop.yarn.ContainerStatusProto status = 1;</code>
     */
    @java.lang.Override
    public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProtoOrBuilder> 
        getStatusOrBuilderList() {
      return status_;
    }
    /**
     * <code>repeated .hadoop.yarn.ContainerStatusProto status = 1;</code>
     */
    @java.lang.Override
    public int getStatusCount() {
      return status_.size();
    }
    /**
     * <code>repeated .hadoop.yarn.ContainerStatusProto status = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto getStatus(int index) {
      return status_.get(index);
    }
    /**
     * <code>repeated .hadoop.yarn.ContainerStatusProto status = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProtoOrBuilder getStatusOrBuilder(
        int index) {
      return status_.get(index);
    }

    public static final int FAILED_REQUESTS_FIELD_NUMBER = 2;
    @SuppressWarnings("serial")
    private java.util.List<org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto> failedRequests_;
    /**
     * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
     */
    @java.lang.Override
    public java.util.List<org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto> getFailedRequestsList() {
      return failedRequests_;
    }
    /**
     * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
     */
    @java.lang.Override
    public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProtoOrBuilder> 
        getFailedRequestsOrBuilderList() {
      return failedRequests_;
    }
    /**
     * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
     */
    @java.lang.Override
    public int getFailedRequestsCount() {
      return failedRequests_.size();
    }
    /**
     * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto getFailedRequests(int index) {
      return failedRequests_.get(index);
    }
    /**
     * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProtoOrBuilder getFailedRequestsOrBuilder(
        int index) {
      return failedRequests_.get(index);
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      for (int i = 0; i < getStatusCount(); i++) {
        if (!getStatus(i).isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      for (int i = 0; i < status_.size(); i++) {
        output.writeMessage(1, status_.get(i));
      }
      for (int i = 0; i < failedRequests_.size(); i++) {
        output.writeMessage(2, failedRequests_.get(i));
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      for (int i = 0; i < status_.size(); i++) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, status_.get(i));
      }
      for (int i = 0; i < failedRequests_.size(); i++) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(2, failedRequests_.get(i));
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesResponseProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesResponseProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesResponseProto) obj;

      if (!getStatusList()
          .equals(other.getStatusList())) return false;
      if (!getFailedRequestsList()
          .equals(other.getFailedRequestsList())) return false;
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (getStatusCount() > 0) {
        hash = (37 * hash) + STATUS_FIELD_NUMBER;
        hash = (53 * hash) + getStatusList().hashCode();
      }
      if (getFailedRequestsCount() > 0) {
        hash = (37 * hash) + FAILED_REQUESTS_FIELD_NUMBER;
        hash = (53 * hash) + getFailedRequestsList().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesResponseProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesResponseProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesResponseProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesResponseProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesResponseProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesResponseProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesResponseProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesResponseProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesResponseProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.GetContainerStatusesResponseProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.GetContainerStatusesResponseProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesResponseProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetContainerStatusesResponseProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetContainerStatusesResponseProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesResponseProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesResponseProto.newBuilder()
      private Builder() {

      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);

      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        if (statusBuilder_ == null) {
          status_ = java.util.Collections.emptyList();
        } else {
          status_ = null;
          statusBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        if (failedRequestsBuilder_ == null) {
          failedRequests_ = java.util.Collections.emptyList();
        } else {
          failedRequests_ = null;
          failedRequestsBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000002);
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetContainerStatusesResponseProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesResponseProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesResponseProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesResponseProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesResponseProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesResponseProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesResponseProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesResponseProto(this);
        buildPartialRepeatedFields(result);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartialRepeatedFields(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesResponseProto result) {
        if (statusBuilder_ == null) {
          if (((bitField0_ & 0x00000001) != 0)) {
            status_ = java.util.Collections.unmodifiableList(status_);
            bitField0_ = (bitField0_ & ~0x00000001);
          }
          result.status_ = status_;
        } else {
          result.status_ = statusBuilder_.build();
        }
        if (failedRequestsBuilder_ == null) {
          if (((bitField0_ & 0x00000002) != 0)) {
            failedRequests_ = java.util.Collections.unmodifiableList(failedRequests_);
            bitField0_ = (bitField0_ & ~0x00000002);
          }
          result.failedRequests_ = failedRequests_;
        } else {
          result.failedRequests_ = failedRequestsBuilder_.build();
        }
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesResponseProto result) {
        // No singular fields in this message, so there is nothing to copy here;
        // both repeated fields are handled in buildPartialRepeatedFields.
        int from_bitField0_ = bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesResponseProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesResponseProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesResponseProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesResponseProto.getDefaultInstance()) return this;
        if (statusBuilder_ == null) {
          if (!other.status_.isEmpty()) {
            if (status_.isEmpty()) {
              status_ = other.status_;
              bitField0_ = (bitField0_ & ~0x00000001);
            } else {
              ensureStatusIsMutable();
              status_.addAll(other.status_);
            }
            onChanged();
          }
        } else {
          if (!other.status_.isEmpty()) {
            if (statusBuilder_.isEmpty()) {
              statusBuilder_.dispose();
              statusBuilder_ = null;
              status_ = other.status_;
              bitField0_ = (bitField0_ & ~0x00000001);
              statusBuilder_ = 
                org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                   getStatusFieldBuilder() : null;
            } else {
              statusBuilder_.addAllMessages(other.status_);
            }
          }
        }
        if (failedRequestsBuilder_ == null) {
          if (!other.failedRequests_.isEmpty()) {
            if (failedRequests_.isEmpty()) {
              failedRequests_ = other.failedRequests_;
              bitField0_ = (bitField0_ & ~0x00000002);
            } else {
              ensureFailedRequestsIsMutable();
              failedRequests_.addAll(other.failedRequests_);
            }
            onChanged();
          }
        } else {
          if (!other.failedRequests_.isEmpty()) {
            if (failedRequestsBuilder_.isEmpty()) {
              failedRequestsBuilder_.dispose();
              failedRequestsBuilder_ = null;
              failedRequests_ = other.failedRequests_;
              bitField0_ = (bitField0_ & ~0x00000002);
              failedRequestsBuilder_ = 
                org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                   getFailedRequestsFieldBuilder() : null;
            } else {
              failedRequestsBuilder_.addAllMessages(other.failedRequests_);
            }
          }
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        for (int i = 0; i < getStatusCount(); i++) {
          if (!getStatus(i).isInitialized()) {
            return false;
          }
        }
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto m =
                    input.readMessage(
                        org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto.PARSER,
                        extensionRegistry);
                if (statusBuilder_ == null) {
                  ensureStatusIsMutable();
                  status_.add(m);
                } else {
                  statusBuilder_.addMessage(m);
                }
                break;
              } // case 10
              case 18: {
                org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto m =
                    input.readMessage(
                        org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.PARSER,
                        extensionRegistry);
                if (failedRequestsBuilder_ == null) {
                  ensureFailedRequestsIsMutable();
                  failedRequests_.add(m);
                } else {
                  failedRequestsBuilder_.addMessage(m);
                }
                break;
              } // case 18
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
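      // Wire-format note for the switch above: each case label is a full
      // protobuf tag, computed as (field_number << 3) | wire_type. Both
      // fields of this message are length-delimited (wire type 2), hence:
      //
      //   status          = field 1: (1 << 3) | 2 = 10
      //   failed_requests = field 2: (2 << 3) | 2 = 18
      //
      // Tag 0 is never a valid field tag, so readTag() returning 0 marks
      // end of input.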
      private int bitField0_;

      private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto> status_ =
        java.util.Collections.emptyList();
      private void ensureStatusIsMutable() {
        if (!((bitField0_ & 0x00000001) != 0)) {
          status_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto>(status_);
          bitField0_ |= 0x00000001;
        }
      }

      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProtoOrBuilder> statusBuilder_;

      /**
       * <code>repeated .hadoop.yarn.ContainerStatusProto status = 1;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto> getStatusList() {
        if (statusBuilder_ == null) {
          return java.util.Collections.unmodifiableList(status_);
        } else {
          return statusBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerStatusProto status = 1;</code>
       */
      public int getStatusCount() {
        if (statusBuilder_ == null) {
          return status_.size();
        } else {
          return statusBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerStatusProto status = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto getStatus(int index) {
        if (statusBuilder_ == null) {
          return status_.get(index);
        } else {
          return statusBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerStatusProto status = 1;</code>
       */
      public Builder setStatus(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto value) {
        if (statusBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureStatusIsMutable();
          status_.set(index, value);
          onChanged();
        } else {
          statusBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerStatusProto status = 1;</code>
       */
      public Builder setStatus(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto.Builder builderForValue) {
        if (statusBuilder_ == null) {
          ensureStatusIsMutable();
          status_.set(index, builderForValue.build());
          onChanged();
        } else {
          statusBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerStatusProto status = 1;</code>
       */
      public Builder addStatus(org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto value) {
        if (statusBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureStatusIsMutable();
          status_.add(value);
          onChanged();
        } else {
          statusBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerStatusProto status = 1;</code>
       */
      public Builder addStatus(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto value) {
        if (statusBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureStatusIsMutable();
          status_.add(index, value);
          onChanged();
        } else {
          statusBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerStatusProto status = 1;</code>
       */
      public Builder addStatus(
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto.Builder builderForValue) {
        if (statusBuilder_ == null) {
          ensureStatusIsMutable();
          status_.add(builderForValue.build());
          onChanged();
        } else {
          statusBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerStatusProto status = 1;</code>
       */
      public Builder addStatus(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto.Builder builderForValue) {
        if (statusBuilder_ == null) {
          ensureStatusIsMutable();
          status_.add(index, builderForValue.build());
          onChanged();
        } else {
          statusBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerStatusProto status = 1;</code>
       */
      public Builder addAllStatus(
          java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto> values) {
        if (statusBuilder_ == null) {
          ensureStatusIsMutable();
          org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
              values, status_);
          onChanged();
        } else {
          statusBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerStatusProto status = 1;</code>
       */
      public Builder clearStatus() {
        if (statusBuilder_ == null) {
          status_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000001);
          onChanged();
        } else {
          statusBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerStatusProto status = 1;</code>
       */
      public Builder removeStatus(int index) {
        if (statusBuilder_ == null) {
          ensureStatusIsMutable();
          status_.remove(index);
          onChanged();
        } else {
          statusBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerStatusProto status = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto.Builder getStatusBuilder(
          int index) {
        return getStatusFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerStatusProto status = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProtoOrBuilder getStatusOrBuilder(
          int index) {
        if (statusBuilder_ == null) {
          return status_.get(index);
        } else {
          return statusBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerStatusProto status = 1;</code>
       */
      public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProtoOrBuilder> 
           getStatusOrBuilderList() {
        if (statusBuilder_ != null) {
          return statusBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(status_);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerStatusProto status = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto.Builder addStatusBuilder() {
        return getStatusFieldBuilder().addBuilder(
            org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerStatusProto status = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto.Builder addStatusBuilder(
          int index) {
        return getStatusFieldBuilder().addBuilder(
            index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerStatusProto status = 1;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto.Builder> 
           getStatusBuilderList() {
        return getStatusFieldBuilder().getBuilderList();
      }
      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProtoOrBuilder> 
          getStatusFieldBuilder() {
        if (statusBuilder_ == null) {
          statusBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProtoOrBuilder>(
                  status_,
                  ((bitField0_ & 0x00000001) != 0),
                  getParentForChildren(),
                  isClean());
          status_ = null;
        }
        return statusBuilder_;
      }
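      // Once getStatusFieldBuilder() has run, ownership of the repeated
      // field moves permanently from the plain status_ list to
      // statusBuilder_ (status_ is nulled), which is why every accessor
      // above branches on whether statusBuilder_ is null.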

      private java.util.List<org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto> failedRequests_ =
        java.util.Collections.emptyList();
      private void ensureFailedRequestsIsMutable() {
        if (!((bitField0_ & 0x00000002) != 0)) {
          failedRequests_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto>(failedRequests_);
          bitField0_ |= 0x00000002;
        }
      }

      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProtoOrBuilder> failedRequestsBuilder_;

      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto> getFailedRequestsList() {
        if (failedRequestsBuilder_ == null) {
          return java.util.Collections.unmodifiableList(failedRequests_);
        } else {
          return failedRequestsBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
       */
      public int getFailedRequestsCount() {
        if (failedRequestsBuilder_ == null) {
          return failedRequests_.size();
        } else {
          return failedRequestsBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto getFailedRequests(int index) {
        if (failedRequestsBuilder_ == null) {
          return failedRequests_.get(index);
        } else {
          return failedRequestsBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
       */
      public Builder setFailedRequests(
          int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto value) {
        if (failedRequestsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureFailedRequestsIsMutable();
          failedRequests_.set(index, value);
          onChanged();
        } else {
          failedRequestsBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
       */
      public Builder setFailedRequests(
          int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder builderForValue) {
        if (failedRequestsBuilder_ == null) {
          ensureFailedRequestsIsMutable();
          failedRequests_.set(index, builderForValue.build());
          onChanged();
        } else {
          failedRequestsBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
       */
      public Builder addFailedRequests(org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto value) {
        if (failedRequestsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureFailedRequestsIsMutable();
          failedRequests_.add(value);
          onChanged();
        } else {
          failedRequestsBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
       */
      public Builder addFailedRequests(
          int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto value) {
        if (failedRequestsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureFailedRequestsIsMutable();
          failedRequests_.add(index, value);
          onChanged();
        } else {
          failedRequestsBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
       */
      public Builder addFailedRequests(
          org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder builderForValue) {
        if (failedRequestsBuilder_ == null) {
          ensureFailedRequestsIsMutable();
          failedRequests_.add(builderForValue.build());
          onChanged();
        } else {
          failedRequestsBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
       */
      public Builder addFailedRequests(
          int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder builderForValue) {
        if (failedRequestsBuilder_ == null) {
          ensureFailedRequestsIsMutable();
          failedRequests_.add(index, builderForValue.build());
          onChanged();
        } else {
          failedRequestsBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
       */
      public Builder addAllFailedRequests(
          java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto> values) {
        if (failedRequestsBuilder_ == null) {
          ensureFailedRequestsIsMutable();
          org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
              values, failedRequests_);
          onChanged();
        } else {
          failedRequestsBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
       */
      public Builder clearFailedRequests() {
        if (failedRequestsBuilder_ == null) {
          failedRequests_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000002);
          onChanged();
        } else {
          failedRequestsBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
       */
      public Builder removeFailedRequests(int index) {
        if (failedRequestsBuilder_ == null) {
          ensureFailedRequestsIsMutable();
          failedRequests_.remove(index);
          onChanged();
        } else {
          failedRequestsBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder getFailedRequestsBuilder(
          int index) {
        return getFailedRequestsFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProtoOrBuilder getFailedRequestsOrBuilder(
          int index) {
        if (failedRequestsBuilder_ == null) {
          return failedRequests_.get(index);
        } else {
          return failedRequestsBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
       */
      public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProtoOrBuilder> 
           getFailedRequestsOrBuilderList() {
        if (failedRequestsBuilder_ != null) {
          return failedRequestsBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(failedRequests_);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder addFailedRequestsBuilder() {
        return getFailedRequestsFieldBuilder().addBuilder(
            org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder addFailedRequestsBuilder(
          int index) {
        return getFailedRequestsFieldBuilder().addBuilder(
            index, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder> 
           getFailedRequestsBuilderList() {
        return getFailedRequestsFieldBuilder().getBuilderList();
      }
      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProtoOrBuilder> 
          getFailedRequestsFieldBuilder() {
        if (failedRequestsBuilder_ == null) {
          failedRequestsBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProtoOrBuilder>(
                  failedRequests_,
                  ((bitField0_ & 0x00000002) != 0),
                  getParentForChildren(),
                  isClean());
          failedRequests_ = null;
        }
        return failedRequestsBuilder_;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.GetContainerStatusesResponseProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.GetContainerStatusesResponseProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesResponseProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesResponseProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesResponseProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<GetContainerStatusesResponseProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<GetContainerStatusesResponseProto>() {
      @java.lang.Override
      public GetContainerStatusesResponseProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
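    // The PARSER field itself is deprecated in recent protobuf codegen;
    // callers are expected to go through parser() or getParserForType()
    // below rather than touching the field directly.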

    public static org.apache.hadoop.thirdparty.protobuf.Parser<GetContainerStatusesResponseProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<GetContainerStatusesResponseProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesResponseProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
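  // A minimal builder-usage sketch for the message above. The variables
  // containerStatus and failedRequest are hypothetical, assumed to be
  // already-built YarnProtos.ContainerStatusProto and
  // ContainerExceptionMapProto instances:
  //
  //   GetContainerStatusesResponseProto response =
  //       GetContainerStatusesResponseProto.newBuilder()
  //           .addStatus(containerStatus)
  //           .addFailedRequests(failedRequest)
  //           .build();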

  public interface IncreaseContainersResourceRequestProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.IncreaseContainersResourceRequestProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>repeated .hadoop.common.TokenProto increase_containers = 1;</code>
     */
    java.util.List<org.apache.hadoop.security.proto.SecurityProtos.TokenProto> 
        getIncreaseContainersList();
    /**
     * <code>repeated .hadoop.common.TokenProto increase_containers = 1;</code>
     */
    org.apache.hadoop.security.proto.SecurityProtos.TokenProto getIncreaseContainers(int index);
    /**
     * <code>repeated .hadoop.common.TokenProto increase_containers = 1;</code>
     */
    int getIncreaseContainersCount();
    /**
     * <code>repeated .hadoop.common.TokenProto increase_containers = 1;</code>
     */
    java.util.List<? extends org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder> 
        getIncreaseContainersOrBuilderList();
    /**
     * <code>repeated .hadoop.common.TokenProto increase_containers = 1;</code>
     */
    org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder getIncreaseContainersOrBuilder(
        int index);
  }
  /**
   * Protobuf type {@code hadoop.yarn.IncreaseContainersResourceRequestProto}
   */
  public static final class IncreaseContainersResourceRequestProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.IncreaseContainersResourceRequestProto)
      IncreaseContainersResourceRequestProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use IncreaseContainersResourceRequestProto.newBuilder() to construct.
    private IncreaseContainersResourceRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private IncreaseContainersResourceRequestProto() {
      increaseContainers_ = java.util.Collections.emptyList();
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new IncreaseContainersResourceRequestProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_IncreaseContainersResourceRequestProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_IncreaseContainersResourceRequestProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceRequestProto.Builder.class);
    }

    public static final int INCREASE_CONTAINERS_FIELD_NUMBER = 1;
    @SuppressWarnings("serial")
    private java.util.List<org.apache.hadoop.security.proto.SecurityProtos.TokenProto> increaseContainers_;
    /**
     * <code>repeated .hadoop.common.TokenProto increase_containers = 1;</code>
     */
    @java.lang.Override
    public java.util.List<org.apache.hadoop.security.proto.SecurityProtos.TokenProto> getIncreaseContainersList() {
      return increaseContainers_;
    }
    /**
     * <code>repeated .hadoop.common.TokenProto increase_containers = 1;</code>
     */
    @java.lang.Override
    public java.util.List<? extends org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder> 
        getIncreaseContainersOrBuilderList() {
      return increaseContainers_;
    }
    /**
     * <code>repeated .hadoop.common.TokenProto increase_containers = 1;</code>
     */
    @java.lang.Override
    public int getIncreaseContainersCount() {
      return increaseContainers_.size();
    }
    /**
     * <code>repeated .hadoop.common.TokenProto increase_containers = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.security.proto.SecurityProtos.TokenProto getIncreaseContainers(int index) {
      return increaseContainers_.get(index);
    }
    /**
     * <code>repeated .hadoop.common.TokenProto increase_containers = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder getIncreaseContainersOrBuilder(
        int index) {
      return increaseContainers_.get(index);
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      for (int i = 0; i < getIncreaseContainersCount(); i++) {
        if (!getIncreaseContainers(i).isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }
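    // memoizedIsInitialized is tri-state: -1 = not yet computed, 0 = known
    // uninitialized, 1 = known initialized. Each repeated TokenProto element
    // is checked recursively because TokenProto declares proto2 required
    // fields, so an element can itself be uninitialized.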

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      for (int i = 0; i < increaseContainers_.size(); i++) {
        output.writeMessage(1, increaseContainers_.get(i));
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      for (int i = 0; i < increaseContainers_.size(); i++) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, increaseContainers_.get(i));
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceRequestProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceRequestProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceRequestProto) obj;

      if (!getIncreaseContainersList()
          .equals(other.getIncreaseContainersList())) return false;
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (getIncreaseContainersCount() > 0) {
        hash = (37 * hash) + INCREASE_CONTAINERS_FIELD_NUMBER;
        hash = (53 * hash) + getIncreaseContainersList().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
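    // hashCode() caches its result in memoizedHashCode and mixes each
    // present field with the generator's conventional constants (37 for the
    // field number, 53 for the value), seeded with the descriptor's hash so
    // that distinct message types hash differently.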

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceRequestProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceRequestProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceRequestProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceRequestProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceRequestProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceRequestProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceRequestProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceRequestProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }
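    // A serialization round-trip sketch; request is a hypothetical,
    // already-built IncreaseContainersResourceRequestProto:
    //
    //   byte[] bytes = request.toByteArray();
    //   IncreaseContainersResourceRequestProto copy =
    //       IncreaseContainersResourceRequestProto.parseFrom(bytes);
    //   // copy.equals(request) holds, per the generated equals() above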

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceRequestProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.IncreaseContainersResourceRequestProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.IncreaseContainersResourceRequestProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceRequestProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_IncreaseContainersResourceRequestProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_IncreaseContainersResourceRequestProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceRequestProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceRequestProto.newBuilder()
      private Builder() {

      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);

      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        if (increaseContainersBuilder_ == null) {
          increaseContainers_ = java.util.Collections.emptyList();
        } else {
          increaseContainers_ = null;
          increaseContainersBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_IncreaseContainersResourceRequestProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceRequestProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceRequestProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceRequestProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceRequestProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceRequestProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceRequestProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceRequestProto(this);
        buildPartialRepeatedFields(result);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartialRepeatedFields(org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceRequestProto result) {
        if (increaseContainersBuilder_ == null) {
          if (((bitField0_ & 0x00000001) != 0)) {
            increaseContainers_ = java.util.Collections.unmodifiableList(increaseContainers_);
            bitField0_ = (bitField0_ & ~0x00000001);
          }
          result.increaseContainers_ = increaseContainers_;
        } else {
          result.increaseContainers_ = increaseContainersBuilder_.build();
        }
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceRequestProto result) {
        // This message has no singular (non-repeated) fields, so there is
        // nothing to copy into result here; the unused local below is
        // generator boilerplate shared with messages that do.
        int from_bitField0_ = bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceRequestProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceRequestProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceRequestProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceRequestProto.getDefaultInstance()) return this;
        if (increaseContainersBuilder_ == null) {
          if (!other.increaseContainers_.isEmpty()) {
            if (increaseContainers_.isEmpty()) {
              increaseContainers_ = other.increaseContainers_;
              bitField0_ = (bitField0_ & ~0x00000001);
            } else {
              ensureIncreaseContainersIsMutable();
              increaseContainers_.addAll(other.increaseContainers_);
            }
            onChanged();
          }
        } else {
          if (!other.increaseContainers_.isEmpty()) {
            if (increaseContainersBuilder_.isEmpty()) {
              increaseContainersBuilder_.dispose();
              increaseContainersBuilder_ = null;
              increaseContainers_ = other.increaseContainers_;
              bitField0_ = (bitField0_ & ~0x00000001);
              increaseContainersBuilder_ = 
                org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                   getIncreaseContainersFieldBuilder() : null;
            } else {
              increaseContainersBuilder_.addAllMessages(other.increaseContainers_);
            }
          }
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        for (int i = 0; i < getIncreaseContainersCount(); i++) {
          if (!getIncreaseContainers(i).isInitialized()) {
            return false;
          }
        }
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                org.apache.hadoop.security.proto.SecurityProtos.TokenProto m =
                    input.readMessage(
                        org.apache.hadoop.security.proto.SecurityProtos.TokenProto.PARSER,
                        extensionRegistry);
                if (increaseContainersBuilder_ == null) {
                  ensureIncreaseContainersIsMutable();
                  increaseContainers_.add(m);
                } else {
                  increaseContainersBuilder_.addMessage(m);
                }
                break;
              } // case 10
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

      private java.util.List<org.apache.hadoop.security.proto.SecurityProtos.TokenProto> increaseContainers_ =
        java.util.Collections.emptyList();
      private void ensureIncreaseContainersIsMutable() {
        if (!((bitField0_ & 0x00000001) != 0)) {
          increaseContainers_ = new java.util.ArrayList<org.apache.hadoop.security.proto.SecurityProtos.TokenProto>(increaseContainers_);
          bitField0_ |= 0x00000001;
        }
      }

      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.security.proto.SecurityProtos.TokenProto, org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder, org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder> increaseContainersBuilder_;

      /**
       * <code>repeated .hadoop.common.TokenProto increase_containers = 1;</code>
       */
      public java.util.List<org.apache.hadoop.security.proto.SecurityProtos.TokenProto> getIncreaseContainersList() {
        if (increaseContainersBuilder_ == null) {
          return java.util.Collections.unmodifiableList(increaseContainers_);
        } else {
          return increaseContainersBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .hadoop.common.TokenProto increase_containers = 1;</code>
       */
      public int getIncreaseContainersCount() {
        if (increaseContainersBuilder_ == null) {
          return increaseContainers_.size();
        } else {
          return increaseContainersBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .hadoop.common.TokenProto increase_containers = 1;</code>
       */
      public org.apache.hadoop.security.proto.SecurityProtos.TokenProto getIncreaseContainers(int index) {
        if (increaseContainersBuilder_ == null) {
          return increaseContainers_.get(index);
        } else {
          return increaseContainersBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .hadoop.common.TokenProto increase_containers = 1;</code>
       */
      public Builder setIncreaseContainers(
          int index, org.apache.hadoop.security.proto.SecurityProtos.TokenProto value) {
        if (increaseContainersBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureIncreaseContainersIsMutable();
          increaseContainers_.set(index, value);
          onChanged();
        } else {
          increaseContainersBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.common.TokenProto increase_containers = 1;</code>
       */
      public Builder setIncreaseContainers(
          int index, org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder builderForValue) {
        if (increaseContainersBuilder_ == null) {
          ensureIncreaseContainersIsMutable();
          increaseContainers_.set(index, builderForValue.build());
          onChanged();
        } else {
          increaseContainersBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.common.TokenProto increase_containers = 1;</code>
       */
      public Builder addIncreaseContainers(org.apache.hadoop.security.proto.SecurityProtos.TokenProto value) {
        if (increaseContainersBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureIncreaseContainersIsMutable();
          increaseContainers_.add(value);
          onChanged();
        } else {
          increaseContainersBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.common.TokenProto increase_containers = 1;</code>
       */
      public Builder addIncreaseContainers(
          int index, org.apache.hadoop.security.proto.SecurityProtos.TokenProto value) {
        if (increaseContainersBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureIncreaseContainersIsMutable();
          increaseContainers_.add(index, value);
          onChanged();
        } else {
          increaseContainersBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.common.TokenProto increase_containers = 1;</code>
       */
      public Builder addIncreaseContainers(
          org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder builderForValue) {
        if (increaseContainersBuilder_ == null) {
          ensureIncreaseContainersIsMutable();
          increaseContainers_.add(builderForValue.build());
          onChanged();
        } else {
          increaseContainersBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.common.TokenProto increase_containers = 1;</code>
       */
      public Builder addIncreaseContainers(
          int index, org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder builderForValue) {
        if (increaseContainersBuilder_ == null) {
          ensureIncreaseContainersIsMutable();
          increaseContainers_.add(index, builderForValue.build());
          onChanged();
        } else {
          increaseContainersBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.common.TokenProto increase_containers = 1;</code>
       */
      public Builder addAllIncreaseContainers(
          java.lang.Iterable<? extends org.apache.hadoop.security.proto.SecurityProtos.TokenProto> values) {
        if (increaseContainersBuilder_ == null) {
          ensureIncreaseContainersIsMutable();
          org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
              values, increaseContainers_);
          onChanged();
        } else {
          increaseContainersBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.common.TokenProto increase_containers = 1;</code>
       */
      public Builder clearIncreaseContainers() {
        if (increaseContainersBuilder_ == null) {
          increaseContainers_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000001);
          onChanged();
        } else {
          increaseContainersBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.common.TokenProto increase_containers = 1;</code>
       */
      public Builder removeIncreaseContainers(int index) {
        if (increaseContainersBuilder_ == null) {
          ensureIncreaseContainersIsMutable();
          increaseContainers_.remove(index);
          onChanged();
        } else {
          increaseContainersBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.common.TokenProto increase_containers = 1;</code>
       */
      public org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder getIncreaseContainersBuilder(
          int index) {
        return getIncreaseContainersFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .hadoop.common.TokenProto increase_containers = 1;</code>
       */
      public org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder getIncreaseContainersOrBuilder(
          int index) {
        if (increaseContainersBuilder_ == null) {
          return increaseContainers_.get(index);
        } else {
          return increaseContainersBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .hadoop.common.TokenProto increase_containers = 1;</code>
       */
      public java.util.List<? extends org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder> 
           getIncreaseContainersOrBuilderList() {
        if (increaseContainersBuilder_ != null) {
          return increaseContainersBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(increaseContainers_);
        }
      }
      /**
       * <code>repeated .hadoop.common.TokenProto increase_containers = 1;</code>
       */
      public org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder addIncreaseContainersBuilder() {
        return getIncreaseContainersFieldBuilder().addBuilder(
            org.apache.hadoop.security.proto.SecurityProtos.TokenProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.common.TokenProto increase_containers = 1;</code>
       */
      public org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder addIncreaseContainersBuilder(
          int index) {
        return getIncreaseContainersFieldBuilder().addBuilder(
            index, org.apache.hadoop.security.proto.SecurityProtos.TokenProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.common.TokenProto increase_containers = 1;</code>
       */
      public java.util.List<org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder> 
           getIncreaseContainersBuilderList() {
        return getIncreaseContainersFieldBuilder().getBuilderList();
      }
      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.security.proto.SecurityProtos.TokenProto, org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder, org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder> 
          getIncreaseContainersFieldBuilder() {
        if (increaseContainersBuilder_ == null) {
          increaseContainersBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
              org.apache.hadoop.security.proto.SecurityProtos.TokenProto, org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder, org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder>(
                  increaseContainers_,
                  ((bitField0_ & 0x00000001) != 0),
                  getParentForChildren(),
                  isClean());
          increaseContainers_ = null;
        }
        return increaseContainersBuilder_;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.IncreaseContainersResourceRequestProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.IncreaseContainersResourceRequestProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceRequestProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceRequestProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceRequestProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<IncreaseContainersResourceRequestProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<IncreaseContainersResourceRequestProto>() {
      @java.lang.Override
      public IncreaseContainersResourceRequestProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<IncreaseContainersResourceRequestProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<IncreaseContainersResourceRequestProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceRequestProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
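
  // Illustrative sketch, not part of the generated API: building an
  // IncreaseContainersResourceRequestProto around a single container token.
  // The identifier, password, kind, and service values below are
  // placeholders; real container tokens are issued by the ResourceManager.
  private static IncreaseContainersResourceRequestProto buildIncreaseRequestSketch() {
    org.apache.hadoop.security.proto.SecurityProtos.TokenProto token =
        org.apache.hadoop.security.proto.SecurityProtos.TokenProto.newBuilder()
            .setIdentifier(org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8("id")) // placeholder
            .setPassword(org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8("pw"))   // placeholder
            .setKind("ContainerToken")
            .setService("host:port")                                                            // placeholder
            .build();
    return IncreaseContainersResourceRequestProto.newBuilder()
        // repeated field: addIncreaseContainers may be called once per token
        .addIncreaseContainers(token)
        .build();
  }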

  public interface IncreaseContainersResourceResponseProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.IncreaseContainersResourceResponseProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1;</code>
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto> 
        getSucceededRequestsList();
    /**
     * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getSucceededRequests(int index);
    /**
     * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1;</code>
     */
    int getSucceededRequestsCount();
    /**
     * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1;</code>
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> 
        getSucceededRequestsOrBuilderList();
    /**
     * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getSucceededRequestsOrBuilder(
        int index);

    /**
     * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto> 
        getFailedRequestsList();
    /**
     * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
     */
    org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto getFailedRequests(int index);
    /**
     * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
     */
    int getFailedRequestsCount();
    /**
     * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProtoOrBuilder> 
        getFailedRequestsOrBuilderList();
    /**
     * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
     */
    org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProtoOrBuilder getFailedRequestsOrBuilder(
        int index);
  }
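
  // Illustrative sketch, not part of the generated API: the OrBuilder view
  // gives read-only access to the repeated fields and is implemented by both
  // the built message and its Builder. Assumes ContainerIdProto exposes its
  // int64 id field via getId().
  private static java.util.List<java.lang.Long> succeededContainerIdsSketch(
      IncreaseContainersResourceResponseProtoOrBuilder response) {
    java.util.List<java.lang.Long> ids = new java.util.ArrayList<java.lang.Long>();
    for (org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto id
        : response.getSucceededRequestsList()) {
      ids.add(id.getId());
    }
    return ids;
  }
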
  /**
   * Protobuf type {@code hadoop.yarn.IncreaseContainersResourceResponseProto}
   */
  public static final class IncreaseContainersResourceResponseProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.IncreaseContainersResourceResponseProto)
      IncreaseContainersResourceResponseProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use IncreaseContainersResourceResponseProto.newBuilder() to construct.
    private IncreaseContainersResourceResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private IncreaseContainersResourceResponseProto() {
      succeededRequests_ = java.util.Collections.emptyList();
      failedRequests_ = java.util.Collections.emptyList();
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new IncreaseContainersResourceResponseProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_IncreaseContainersResourceResponseProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_IncreaseContainersResourceResponseProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceResponseProto.Builder.class);
    }

    public static final int SUCCEEDED_REQUESTS_FIELD_NUMBER = 1;
    @SuppressWarnings("serial")
    private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto> succeededRequests_;
    /**
     * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1;</code>
     */
    @java.lang.Override
    public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto> getSucceededRequestsList() {
      return succeededRequests_;
    }
    /**
     * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1;</code>
     */
    @java.lang.Override
    public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> 
        getSucceededRequestsOrBuilderList() {
      return succeededRequests_;
    }
    /**
     * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1;</code>
     */
    @java.lang.Override
    public int getSucceededRequestsCount() {
      return succeededRequests_.size();
    }
    /**
     * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getSucceededRequests(int index) {
      return succeededRequests_.get(index);
    }
    /**
     * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getSucceededRequestsOrBuilder(
        int index) {
      return succeededRequests_.get(index);
    }

    public static final int FAILED_REQUESTS_FIELD_NUMBER = 2;
    @SuppressWarnings("serial")
    private java.util.List<org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto> failedRequests_;
    /**
     * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
     */
    @java.lang.Override
    public java.util.List<org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto> getFailedRequestsList() {
      return failedRequests_;
    }
    /**
     * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
     */
    @java.lang.Override
    public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProtoOrBuilder> 
        getFailedRequestsOrBuilderList() {
      return failedRequests_;
    }
    /**
     * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
     */
    @java.lang.Override
    public int getFailedRequestsCount() {
      return failedRequests_.size();
    }
    /**
     * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto getFailedRequests(int index) {
      return failedRequests_.get(index);
    }
    /**
     * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProtoOrBuilder getFailedRequestsOrBuilder(
        int index) {
      return failedRequests_.get(index);
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      for (int i = 0; i < succeededRequests_.size(); i++) {
        output.writeMessage(1, succeededRequests_.get(i));
      }
      for (int i = 0; i < failedRequests_.size(); i++) {
        output.writeMessage(2, failedRequests_.get(i));
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      for (int i = 0; i < succeededRequests_.size(); i++) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, succeededRequests_.get(i));
      }
      for (int i = 0; i < failedRequests_.size(); i++) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(2, failedRequests_.get(i));
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceResponseProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceResponseProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceResponseProto) obj;

      if (!getSucceededRequestsList()
          .equals(other.getSucceededRequestsList())) return false;
      if (!getFailedRequestsList()
          .equals(other.getFailedRequestsList())) return false;
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (getSucceededRequestsCount() > 0) {
        hash = (37 * hash) + SUCCEEDED_REQUESTS_FIELD_NUMBER;
        hash = (53 * hash) + getSucceededRequestsList().hashCode();
      }
      if (getFailedRequestsCount() > 0) {
        hash = (37 * hash) + FAILED_REQUESTS_FIELD_NUMBER;
        hash = (53 * hash) + getFailedRequestsList().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceResponseProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceResponseProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceResponseProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceResponseProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceResponseProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceResponseProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceResponseProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceResponseProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceResponseProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.IncreaseContainersResourceResponseProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.IncreaseContainersResourceResponseProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceResponseProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_IncreaseContainersResourceResponseProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_IncreaseContainersResourceResponseProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceResponseProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceResponseProto.newBuilder()
      private Builder() {
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        if (succeededRequestsBuilder_ == null) {
          succeededRequests_ = java.util.Collections.emptyList();
        } else {
          succeededRequests_ = null;
          succeededRequestsBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        if (failedRequestsBuilder_ == null) {
          failedRequests_ = java.util.Collections.emptyList();
        } else {
          failedRequests_ = null;
          failedRequestsBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000002);
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_IncreaseContainersResourceResponseProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceResponseProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceResponseProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceResponseProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceResponseProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceResponseProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceResponseProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceResponseProto(this);
        buildPartialRepeatedFields(result);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartialRepeatedFields(org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceResponseProto result) {
        if (succeededRequestsBuilder_ == null) {
          if (((bitField0_ & 0x00000001) != 0)) {
            succeededRequests_ = java.util.Collections.unmodifiableList(succeededRequests_);
            bitField0_ = (bitField0_ & ~0x00000001);
          }
          result.succeededRequests_ = succeededRequests_;
        } else {
          result.succeededRequests_ = succeededRequestsBuilder_.build();
        }
        if (failedRequestsBuilder_ == null) {
          if (((bitField0_ & 0x00000002) != 0)) {
            failedRequests_ = java.util.Collections.unmodifiableList(failedRequests_);
            bitField0_ = (bitField0_ & ~0x00000002);
          }
          result.failedRequests_ = failedRequests_;
        } else {
          result.failedRequests_ = failedRequestsBuilder_.build();
        }
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceResponseProto result) {
        // No singular fields to copy here; the repeated fields are handled
        // in buildPartialRepeatedFields(result) above.
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceResponseProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceResponseProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceResponseProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceResponseProto.getDefaultInstance()) return this;
        if (succeededRequestsBuilder_ == null) {
          if (!other.succeededRequests_.isEmpty()) {
            if (succeededRequests_.isEmpty()) {
              succeededRequests_ = other.succeededRequests_;
              bitField0_ = (bitField0_ & ~0x00000001);
            } else {
              ensureSucceededRequestsIsMutable();
              succeededRequests_.addAll(other.succeededRequests_);
            }
            onChanged();
          }
        } else {
          if (!other.succeededRequests_.isEmpty()) {
            if (succeededRequestsBuilder_.isEmpty()) {
              succeededRequestsBuilder_.dispose();
              succeededRequestsBuilder_ = null;
              succeededRequests_ = other.succeededRequests_;
              bitField0_ = (bitField0_ & ~0x00000001);
              succeededRequestsBuilder_ = 
                org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                   getSucceededRequestsFieldBuilder() : null;
            } else {
              succeededRequestsBuilder_.addAllMessages(other.succeededRequests_);
            }
          }
        }
        if (failedRequestsBuilder_ == null) {
          if (!other.failedRequests_.isEmpty()) {
            if (failedRequests_.isEmpty()) {
              failedRequests_ = other.failedRequests_;
              bitField0_ = (bitField0_ & ~0x00000002);
            } else {
              ensureFailedRequestsIsMutable();
              failedRequests_.addAll(other.failedRequests_);
            }
            onChanged();
          }
        } else {
          if (!other.failedRequests_.isEmpty()) {
            if (failedRequestsBuilder_.isEmpty()) {
              failedRequestsBuilder_.dispose();
              failedRequestsBuilder_ = null;
              failedRequests_ = other.failedRequests_;
              bitField0_ = (bitField0_ & ~0x00000002);
              failedRequestsBuilder_ = 
                org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                   getFailedRequestsFieldBuilder() : null;
            } else {
              failedRequestsBuilder_.addAllMessages(other.failedRequests_);
            }
          }
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto m =
                    input.readMessage(
                        org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.PARSER,
                        extensionRegistry);
                if (succeededRequestsBuilder_ == null) {
                  ensureSucceededRequestsIsMutable();
                  succeededRequests_.add(m);
                } else {
                  succeededRequestsBuilder_.addMessage(m);
                }
                break;
              } // case 10
              case 18: {
                org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto m =
                    input.readMessage(
                        org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.PARSER,
                        extensionRegistry);
                if (failedRequestsBuilder_ == null) {
                  ensureFailedRequestsIsMutable();
                  failedRequests_.add(m);
                } else {
                  failedRequestsBuilder_.addMessage(m);
                }
                break;
              } // case 18
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

      private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto> succeededRequests_ =
        java.util.Collections.emptyList();
      private void ensureSucceededRequestsIsMutable() {
        if (!((bitField0_ & 0x00000001) != 0)) {
          succeededRequests_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto>(succeededRequests_);
          bitField0_ |= 0x00000001;
        }
      }

      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> succeededRequestsBuilder_;

      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto> getSucceededRequestsList() {
        if (succeededRequestsBuilder_ == null) {
          return java.util.Collections.unmodifiableList(succeededRequests_);
        } else {
          return succeededRequestsBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1;</code>
       */
      public int getSucceededRequestsCount() {
        if (succeededRequestsBuilder_ == null) {
          return succeededRequests_.size();
        } else {
          return succeededRequestsBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getSucceededRequests(int index) {
        if (succeededRequestsBuilder_ == null) {
          return succeededRequests_.get(index);
        } else {
          return succeededRequestsBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1;</code>
       */
      public Builder setSucceededRequests(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) {
        if (succeededRequestsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureSucceededRequestsIsMutable();
          succeededRequests_.set(index, value);
          onChanged();
        } else {
          succeededRequestsBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1;</code>
       */
      public Builder setSucceededRequests(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder builderForValue) {
        if (succeededRequestsBuilder_ == null) {
          ensureSucceededRequestsIsMutable();
          succeededRequests_.set(index, builderForValue.build());
          onChanged();
        } else {
          succeededRequestsBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1;</code>
       */
      public Builder addSucceededRequests(org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) {
        if (succeededRequestsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureSucceededRequestsIsMutable();
          succeededRequests_.add(value);
          onChanged();
        } else {
          succeededRequestsBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1;</code>
       */
      public Builder addSucceededRequests(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) {
        if (succeededRequestsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureSucceededRequestsIsMutable();
          succeededRequests_.add(index, value);
          onChanged();
        } else {
          succeededRequestsBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1;</code>
       */
      public Builder addSucceededRequests(
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder builderForValue) {
        if (succeededRequestsBuilder_ == null) {
          ensureSucceededRequestsIsMutable();
          succeededRequests_.add(builderForValue.build());
          onChanged();
        } else {
          succeededRequestsBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1;</code>
       */
      public Builder addSucceededRequests(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder builderForValue) {
        if (succeededRequestsBuilder_ == null) {
          ensureSucceededRequestsIsMutable();
          succeededRequests_.add(index, builderForValue.build());
          onChanged();
        } else {
          succeededRequestsBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1;</code>
       */
      public Builder addAllSucceededRequests(
          java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto> values) {
        if (succeededRequestsBuilder_ == null) {
          ensureSucceededRequestsIsMutable();
          org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
              values, succeededRequests_);
          onChanged();
        } else {
          succeededRequestsBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1;</code>
       */
      public Builder clearSucceededRequests() {
        if (succeededRequestsBuilder_ == null) {
          succeededRequests_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000001);
          onChanged();
        } else {
          succeededRequestsBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1;</code>
       */
      public Builder removeSucceededRequests(int index) {
        if (succeededRequestsBuilder_ == null) {
          ensureSucceededRequestsIsMutable();
          succeededRequests_.remove(index);
          onChanged();
        } else {
          succeededRequestsBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder getSucceededRequestsBuilder(
          int index) {
        return getSucceededRequestsFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getSucceededRequestsOrBuilder(
          int index) {
        if (succeededRequestsBuilder_ == null) {
          return succeededRequests_.get(index);
        } else {
          return succeededRequestsBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1;</code>
       */
      public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> 
           getSucceededRequestsOrBuilderList() {
        if (succeededRequestsBuilder_ != null) {
          return succeededRequestsBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(succeededRequests_);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder addSucceededRequestsBuilder() {
        return getSucceededRequestsFieldBuilder().addBuilder(
            org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder addSucceededRequestsBuilder(
          int index) {
        return getSucceededRequestsFieldBuilder().addBuilder(
            index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder> 
           getSucceededRequestsBuilderList() {
        return getSucceededRequestsFieldBuilder().getBuilderList();
      }
      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> 
          getSucceededRequestsFieldBuilder() {
        if (succeededRequestsBuilder_ == null) {
          succeededRequestsBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder>(
                  succeededRequests_,
                  ((bitField0_ & 0x00000001) != 0),
                  getParentForChildren(),
                  isClean());
          succeededRequests_ = null;
        }
        return succeededRequestsBuilder_;
      }

      private java.util.List<org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto> failedRequests_ =
        java.util.Collections.emptyList();
      private void ensureFailedRequestsIsMutable() {
        if (!((bitField0_ & 0x00000002) != 0)) {
          failedRequests_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto>(failedRequests_);
          bitField0_ |= 0x00000002;
        }
      }

      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProtoOrBuilder> failedRequestsBuilder_;

      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto> getFailedRequestsList() {
        if (failedRequestsBuilder_ == null) {
          return java.util.Collections.unmodifiableList(failedRequests_);
        } else {
          return failedRequestsBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
       */
      public int getFailedRequestsCount() {
        if (failedRequestsBuilder_ == null) {
          return failedRequests_.size();
        } else {
          return failedRequestsBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto getFailedRequests(int index) {
        if (failedRequestsBuilder_ == null) {
          return failedRequests_.get(index);
        } else {
          return failedRequestsBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
       */
      public Builder setFailedRequests(
          int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto value) {
        if (failedRequestsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureFailedRequestsIsMutable();
          failedRequests_.set(index, value);
          onChanged();
        } else {
          failedRequestsBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
       */
      public Builder setFailedRequests(
          int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder builderForValue) {
        if (failedRequestsBuilder_ == null) {
          ensureFailedRequestsIsMutable();
          failedRequests_.set(index, builderForValue.build());
          onChanged();
        } else {
          failedRequestsBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
       */
      public Builder addFailedRequests(org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto value) {
        if (failedRequestsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureFailedRequestsIsMutable();
          failedRequests_.add(value);
          onChanged();
        } else {
          failedRequestsBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
       */
      public Builder addFailedRequests(
          int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto value) {
        if (failedRequestsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureFailedRequestsIsMutable();
          failedRequests_.add(index, value);
          onChanged();
        } else {
          failedRequestsBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
       */
      public Builder addFailedRequests(
          org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder builderForValue) {
        if (failedRequestsBuilder_ == null) {
          ensureFailedRequestsIsMutable();
          failedRequests_.add(builderForValue.build());
          onChanged();
        } else {
          failedRequestsBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
       */
      public Builder addFailedRequests(
          int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder builderForValue) {
        if (failedRequestsBuilder_ == null) {
          ensureFailedRequestsIsMutable();
          failedRequests_.add(index, builderForValue.build());
          onChanged();
        } else {
          failedRequestsBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
       */
      public Builder addAllFailedRequests(
          java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto> values) {
        if (failedRequestsBuilder_ == null) {
          ensureFailedRequestsIsMutable();
          org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
              values, failedRequests_);
          onChanged();
        } else {
          failedRequestsBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
       */
      public Builder clearFailedRequests() {
        if (failedRequestsBuilder_ == null) {
          failedRequests_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000002);
          onChanged();
        } else {
          failedRequestsBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
       */
      public Builder removeFailedRequests(int index) {
        if (failedRequestsBuilder_ == null) {
          ensureFailedRequestsIsMutable();
          failedRequests_.remove(index);
          onChanged();
        } else {
          failedRequestsBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder getFailedRequestsBuilder(
          int index) {
        return getFailedRequestsFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProtoOrBuilder getFailedRequestsOrBuilder(
          int index) {
        if (failedRequestsBuilder_ == null) {
          return failedRequests_.get(index);
        } else {
          return failedRequestsBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
       */
      public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProtoOrBuilder> 
           getFailedRequestsOrBuilderList() {
        if (failedRequestsBuilder_ != null) {
          return failedRequestsBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(failedRequests_);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder addFailedRequestsBuilder() {
        return getFailedRequestsFieldBuilder().addBuilder(
            org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder addFailedRequestsBuilder(
          int index) {
        return getFailedRequestsFieldBuilder().addBuilder(
            index, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder> 
           getFailedRequestsBuilderList() {
        return getFailedRequestsFieldBuilder().getBuilderList();
      }
      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProtoOrBuilder> 
          getFailedRequestsFieldBuilder() {
        if (failedRequestsBuilder_ == null) {
          failedRequestsBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProtoOrBuilder>(
                  failedRequests_,
                  ((bitField0_ & 0x00000002) != 0),
                  getParentForChildren(),
                  isClean());
          failedRequests_ = null;
        }
        return failedRequestsBuilder_;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.IncreaseContainersResourceResponseProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.IncreaseContainersResourceResponseProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceResponseProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceResponseProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceResponseProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<IncreaseContainersResourceResponseProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<IncreaseContainersResourceResponseProto>() {
      @java.lang.Override
      public IncreaseContainersResourceResponseProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<IncreaseContainersResourceResponseProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<IncreaseContainersResourceResponseProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceResponseProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
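
  // Illustrative sketch, not part of the generated API: toByteArray() and
  // parseFrom(byte[]) round-trip a response losslessly over the wire, so the
  // parsed copy equals the original (equals() is value-based, see above).
  private static IncreaseContainersResourceResponseProto roundTripResponseSketch(
      IncreaseContainersResourceResponseProto response)
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    byte[] wire = response.toByteArray();
    return IncreaseContainersResourceResponseProto.parseFrom(wire);
  }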

  public interface ContainerUpdateRequestProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.ContainerUpdateRequestProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>repeated .hadoop.common.TokenProto update_container_token = 1;</code>
     */
    java.util.List<org.apache.hadoop.security.proto.SecurityProtos.TokenProto> 
        getUpdateContainerTokenList();
    /**
     * <code>repeated .hadoop.common.TokenProto update_container_token = 1;</code>
     */
    org.apache.hadoop.security.proto.SecurityProtos.TokenProto getUpdateContainerToken(int index);
    /**
     * <code>repeated .hadoop.common.TokenProto update_container_token = 1;</code>
     */
    int getUpdateContainerTokenCount();
    /**
     * <code>repeated .hadoop.common.TokenProto update_container_token = 1;</code>
     */
    java.util.List<? extends org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder> 
        getUpdateContainerTokenOrBuilderList();
    /**
     * <code>repeated .hadoop.common.TokenProto update_container_token = 1;</code>
     */
    org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder getUpdateContainerTokenOrBuilder(
        int index);
  }
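
  // Illustrative sketch, not part of the generated API: merging follows
  // standard protobuf semantics, so the repeated update_container_token
  // entries of both requests are concatenated rather than replaced. Assumes
  // both inputs carry fully initialized tokens.
  private static ContainerUpdateRequestProto mergeUpdateRequestsSketch(
      ContainerUpdateRequestProto first, ContainerUpdateRequestProto second) {
    // Result token count == first.getUpdateContainerTokenCount()
    //                     + second.getUpdateContainerTokenCount()
    return first.toBuilder().mergeFrom(second).build();
  }
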
  /**
   * Protobuf type {@code hadoop.yarn.ContainerUpdateRequestProto}
   */
  public static final class ContainerUpdateRequestProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.ContainerUpdateRequestProto)
      ContainerUpdateRequestProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use ContainerUpdateRequestProto.newBuilder() to construct.
    private ContainerUpdateRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private ContainerUpdateRequestProto() {
      updateContainerToken_ = java.util.Collections.emptyList();
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new ContainerUpdateRequestProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ContainerUpdateRequestProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ContainerUpdateRequestProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateRequestProto.Builder.class);
    }

    public static final int UPDATE_CONTAINER_TOKEN_FIELD_NUMBER = 1;
    @SuppressWarnings("serial")
    private java.util.List<org.apache.hadoop.security.proto.SecurityProtos.TokenProto> updateContainerToken_;
    /**
     * <code>repeated .hadoop.common.TokenProto update_container_token = 1;</code>
     */
    @java.lang.Override
    public java.util.List<org.apache.hadoop.security.proto.SecurityProtos.TokenProto> getUpdateContainerTokenList() {
      return updateContainerToken_;
    }
    /**
     * <code>repeated .hadoop.common.TokenProto update_container_token = 1;</code>
     */
    @java.lang.Override
    public java.util.List<? extends org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder> 
        getUpdateContainerTokenOrBuilderList() {
      return updateContainerToken_;
    }
    /**
     * <code>repeated .hadoop.common.TokenProto update_container_token = 1;</code>
     */
    @java.lang.Override
    public int getUpdateContainerTokenCount() {
      return updateContainerToken_.size();
    }
    /**
     * <code>repeated .hadoop.common.TokenProto update_container_token = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.security.proto.SecurityProtos.TokenProto getUpdateContainerToken(int index) {
      return updateContainerToken_.get(index);
    }
    /**
     * <code>repeated .hadoop.common.TokenProto update_container_token = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder getUpdateContainerTokenOrBuilder(
        int index) {
      return updateContainerToken_.get(index);
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      for (int i = 0; i < getUpdateContainerTokenCount(); i++) {
        if (!getUpdateContainerToken(i).isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }
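
    // Note: the loop above exists because hadoop.common.TokenProto declares
    // required fields; protoc therefore verifies every nested token before
    // declaring this message initialized, memoizing the result in
    // memoizedIsInitialized (-1 = unknown, 0 = false, 1 = true).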

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      for (int i = 0; i < updateContainerToken_.size(); i++) {
        output.writeMessage(1, updateContainerToken_.get(i));
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      for (int i = 0; i < updateContainerToken_.size(); i++) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, updateContainerToken_.get(i));
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateRequestProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateRequestProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateRequestProto) obj;

      if (!getUpdateContainerTokenList()
          .equals(other.getUpdateContainerTokenList())) return false;
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (getUpdateContainerTokenCount() > 0) {
        hash = (37 * hash) + UPDATE_CONTAINER_TOKEN_FIELD_NUMBER;
        hash = (53 * hash) + getUpdateContainerTokenList().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateRequestProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateRequestProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateRequestProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateRequestProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateRequestProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateRequestProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateRequestProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateRequestProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }
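
    // Illustrative sketch (not protoc output): a serialize/parse round trip
    // using the overloads above. toByteArray() is inherited from
    // AbstractMessageLite; "request" is a placeholder instance for the example.
    //
    //   byte[] bytes = request.toByteArray();
    //   ContainerUpdateRequestProto copy =
    //       ContainerUpdateRequestProto.parseFrom(bytes);
    //   assert copy.equals(request);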

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateRequestProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.ContainerUpdateRequestProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.ContainerUpdateRequestProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateRequestProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ContainerUpdateRequestProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ContainerUpdateRequestProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateRequestProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateRequestProto.newBuilder()
      private Builder() {

      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);

      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        if (updateContainerTokenBuilder_ == null) {
          updateContainerToken_ = java.util.Collections.emptyList();
        } else {
          updateContainerToken_ = null;
          updateContainerTokenBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ContainerUpdateRequestProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateRequestProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateRequestProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateRequestProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateRequestProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateRequestProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateRequestProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateRequestProto(this);
        buildPartialRepeatedFields(result);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartialRepeatedFields(org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateRequestProto result) {
        if (updateContainerTokenBuilder_ == null) {
          if (((bitField0_ & 0x00000001) != 0)) {
            updateContainerToken_ = java.util.Collections.unmodifiableList(updateContainerToken_);
            bitField0_ = (bitField0_ & ~0x00000001);
          }
          result.updateContainerToken_ = updateContainerToken_;
        } else {
          result.updateContainerToken_ = updateContainerTokenBuilder_.build();
        }
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateRequestProto result) {
        // No singular fields to copy; the repeated field is handled in
        // buildPartialRepeatedFields(result) above.
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateRequestProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateRequestProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateRequestProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateRequestProto.getDefaultInstance()) return this;
        if (updateContainerTokenBuilder_ == null) {
          if (!other.updateContainerToken_.isEmpty()) {
            if (updateContainerToken_.isEmpty()) {
              updateContainerToken_ = other.updateContainerToken_;
              bitField0_ = (bitField0_ & ~0x00000001);
            } else {
              ensureUpdateContainerTokenIsMutable();
              updateContainerToken_.addAll(other.updateContainerToken_);
            }
            onChanged();
          }
        } else {
          if (!other.updateContainerToken_.isEmpty()) {
            if (updateContainerTokenBuilder_.isEmpty()) {
              updateContainerTokenBuilder_.dispose();
              updateContainerTokenBuilder_ = null;
              updateContainerToken_ = other.updateContainerToken_;
              bitField0_ = (bitField0_ & ~0x00000001);
              updateContainerTokenBuilder_ = 
                org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                   getUpdateContainerTokenFieldBuilder() : null;
            } else {
              updateContainerTokenBuilder_.addAllMessages(other.updateContainerToken_);
            }
          }
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        for (int i = 0; i < getUpdateContainerTokenCount(); i++) {
          if (!getUpdateContainerToken(i).isInitialized()) {
            return false;
          }
        }
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
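              // Tag 10 == (field_number 1 << 3) | wire_type 2
              // (length-delimited), i.e. update_container_token.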
              case 10: {
                org.apache.hadoop.security.proto.SecurityProtos.TokenProto m =
                    input.readMessage(
                        org.apache.hadoop.security.proto.SecurityProtos.TokenProto.PARSER,
                        extensionRegistry);
                if (updateContainerTokenBuilder_ == null) {
                  ensureUpdateContainerTokenIsMutable();
                  updateContainerToken_.add(m);
                } else {
                  updateContainerTokenBuilder_.addMessage(m);
                }
                break;
              } // case 10
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

      private java.util.List<org.apache.hadoop.security.proto.SecurityProtos.TokenProto> updateContainerToken_ =
        java.util.Collections.emptyList();
      private void ensureUpdateContainerTokenIsMutable() {
        if (!((bitField0_ & 0x00000001) != 0)) {
          updateContainerToken_ = new java.util.ArrayList<org.apache.hadoop.security.proto.SecurityProtos.TokenProto>(updateContainerToken_);
          bitField0_ |= 0x00000001;
        }
      }
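
      // ensureUpdateContainerTokenIsMutable() is a copy-on-write guard: the
      // builder may share an immutable list taken from a merged message, so
      // the first mutation copies it into a fresh ArrayList and sets bit 0 of
      // bitField0_ to record that the builder now owns the list.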

      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.security.proto.SecurityProtos.TokenProto, org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder, org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder> updateContainerTokenBuilder_;

      /**
       * <code>repeated .hadoop.common.TokenProto update_container_token = 1;</code>
       */
      public java.util.List<org.apache.hadoop.security.proto.SecurityProtos.TokenProto> getUpdateContainerTokenList() {
        if (updateContainerTokenBuilder_ == null) {
          return java.util.Collections.unmodifiableList(updateContainerToken_);
        } else {
          return updateContainerTokenBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .hadoop.common.TokenProto update_container_token = 1;</code>
       */
      public int getUpdateContainerTokenCount() {
        if (updateContainerTokenBuilder_ == null) {
          return updateContainerToken_.size();
        } else {
          return updateContainerTokenBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .hadoop.common.TokenProto update_container_token = 1;</code>
       */
      public org.apache.hadoop.security.proto.SecurityProtos.TokenProto getUpdateContainerToken(int index) {
        if (updateContainerTokenBuilder_ == null) {
          return updateContainerToken_.get(index);
        } else {
          return updateContainerTokenBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .hadoop.common.TokenProto update_container_token = 1;</code>
       */
      public Builder setUpdateContainerToken(
          int index, org.apache.hadoop.security.proto.SecurityProtos.TokenProto value) {
        if (updateContainerTokenBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureUpdateContainerTokenIsMutable();
          updateContainerToken_.set(index, value);
          onChanged();
        } else {
          updateContainerTokenBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.common.TokenProto update_container_token = 1;</code>
       */
      public Builder setUpdateContainerToken(
          int index, org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder builderForValue) {
        if (updateContainerTokenBuilder_ == null) {
          ensureUpdateContainerTokenIsMutable();
          updateContainerToken_.set(index, builderForValue.build());
          onChanged();
        } else {
          updateContainerTokenBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.common.TokenProto update_container_token = 1;</code>
       */
      public Builder addUpdateContainerToken(org.apache.hadoop.security.proto.SecurityProtos.TokenProto value) {
        if (updateContainerTokenBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureUpdateContainerTokenIsMutable();
          updateContainerToken_.add(value);
          onChanged();
        } else {
          updateContainerTokenBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.common.TokenProto update_container_token = 1;</code>
       */
      public Builder addUpdateContainerToken(
          int index, org.apache.hadoop.security.proto.SecurityProtos.TokenProto value) {
        if (updateContainerTokenBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureUpdateContainerTokenIsMutable();
          updateContainerToken_.add(index, value);
          onChanged();
        } else {
          updateContainerTokenBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.common.TokenProto update_container_token = 1;</code>
       */
      public Builder addUpdateContainerToken(
          org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder builderForValue) {
        if (updateContainerTokenBuilder_ == null) {
          ensureUpdateContainerTokenIsMutable();
          updateContainerToken_.add(builderForValue.build());
          onChanged();
        } else {
          updateContainerTokenBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.common.TokenProto update_container_token = 1;</code>
       */
      public Builder addUpdateContainerToken(
          int index, org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder builderForValue) {
        if (updateContainerTokenBuilder_ == null) {
          ensureUpdateContainerTokenIsMutable();
          updateContainerToken_.add(index, builderForValue.build());
          onChanged();
        } else {
          updateContainerTokenBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.common.TokenProto update_container_token = 1;</code>
       */
      public Builder addAllUpdateContainerToken(
          java.lang.Iterable<? extends org.apache.hadoop.security.proto.SecurityProtos.TokenProto> values) {
        if (updateContainerTokenBuilder_ == null) {
          ensureUpdateContainerTokenIsMutable();
          org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
              values, updateContainerToken_);
          onChanged();
        } else {
          updateContainerTokenBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.common.TokenProto update_container_token = 1;</code>
       */
      public Builder clearUpdateContainerToken() {
        if (updateContainerTokenBuilder_ == null) {
          updateContainerToken_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000001);
          onChanged();
        } else {
          updateContainerTokenBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.common.TokenProto update_container_token = 1;</code>
       */
      public Builder removeUpdateContainerToken(int index) {
        if (updateContainerTokenBuilder_ == null) {
          ensureUpdateContainerTokenIsMutable();
          updateContainerToken_.remove(index);
          onChanged();
        } else {
          updateContainerTokenBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.common.TokenProto update_container_token = 1;</code>
       */
      public org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder getUpdateContainerTokenBuilder(
          int index) {
        return getUpdateContainerTokenFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .hadoop.common.TokenProto update_container_token = 1;</code>
       */
      public org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder getUpdateContainerTokenOrBuilder(
          int index) {
        if (updateContainerTokenBuilder_ == null) {
          return updateContainerToken_.get(index);
        } else {
          return updateContainerTokenBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .hadoop.common.TokenProto update_container_token = 1;</code>
       */
      public java.util.List<? extends org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder> 
           getUpdateContainerTokenOrBuilderList() {
        if (updateContainerTokenBuilder_ != null) {
          return updateContainerTokenBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(updateContainerToken_);
        }
      }
      /**
       * <code>repeated .hadoop.common.TokenProto update_container_token = 1;</code>
       */
      public org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder addUpdateContainerTokenBuilder() {
        return getUpdateContainerTokenFieldBuilder().addBuilder(
            org.apache.hadoop.security.proto.SecurityProtos.TokenProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.common.TokenProto update_container_token = 1;</code>
       */
      public org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder addUpdateContainerTokenBuilder(
          int index) {
        return getUpdateContainerTokenFieldBuilder().addBuilder(
            index, org.apache.hadoop.security.proto.SecurityProtos.TokenProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.common.TokenProto update_container_token = 1;</code>
       */
      public java.util.List<org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder> 
           getUpdateContainerTokenBuilderList() {
        return getUpdateContainerTokenFieldBuilder().getBuilderList();
      }
      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.security.proto.SecurityProtos.TokenProto, org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder, org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder> 
          getUpdateContainerTokenFieldBuilder() {
        if (updateContainerTokenBuilder_ == null) {
          updateContainerTokenBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
              org.apache.hadoop.security.proto.SecurityProtos.TokenProto, org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder, org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder>(
                  updateContainerToken_,
                  ((bitField0_ & 0x00000001) != 0),
                  getParentForChildren(),
                  isClean());
          updateContainerToken_ = null;
        }
        return updateContainerTokenBuilder_;
      }
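
      // Once getUpdateContainerTokenFieldBuilder() runs, updateContainerToken_
      // is nulled and every accessor above routes through the
      // RepeatedFieldBuilderV3, which tracks per-element builders and
      // reports changes back to the parent builder.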
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.ContainerUpdateRequestProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.ContainerUpdateRequestProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateRequestProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateRequestProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateRequestProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated
    public static final org.apache.hadoop.thirdparty.protobuf.Parser<ContainerUpdateRequestProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<ContainerUpdateRequestProto>() {
      @java.lang.Override
      public ContainerUpdateRequestProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<ContainerUpdateRequestProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<ContainerUpdateRequestProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateRequestProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
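
  // Illustrative sketch (not protoc output): assembling a
  // ContainerUpdateRequestProto with the builder API defined above. The
  // TokenProto field values are placeholder assumptions, not real YARN
  // security tokens.
  //
  //   org.apache.hadoop.security.proto.SecurityProtos.TokenProto token =
  //       org.apache.hadoop.security.proto.SecurityProtos.TokenProto.newBuilder()
  //           .setIdentifier(org.apache.hadoop.thirdparty.protobuf.ByteString
  //               .copyFromUtf8("id"))
  //           .setPassword(org.apache.hadoop.thirdparty.protobuf.ByteString
  //               .copyFromUtf8("pw"))
  //           .setKind("ContainerToken")
  //           .setService("host:45454")
  //           .build();
  //   ContainerUpdateRequestProto request =
  //       ContainerUpdateRequestProto.newBuilder()
  //           .addUpdateContainerToken(token)
  //           .build();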

  public interface ContainerUpdateResponseProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.ContainerUpdateResponseProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1;</code>
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto> 
        getSucceededRequestsList();
    /**
     * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getSucceededRequests(int index);
    /**
     * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1;</code>
     */
    int getSucceededRequestsCount();
    /**
     * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1;</code>
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> 
        getSucceededRequestsOrBuilderList();
    /**
     * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getSucceededRequestsOrBuilder(
        int index);

    /**
     * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto> 
        getFailedRequestsList();
    /**
     * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
     */
    org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto getFailedRequests(int index);
    /**
     * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
     */
    int getFailedRequestsCount();
    /**
     * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProtoOrBuilder> 
        getFailedRequestsOrBuilderList();
    /**
     * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
     */
    org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProtoOrBuilder getFailedRequestsOrBuilder(
        int index);
  }
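
  // Illustrative sketch (not protoc output): splitting a
  // ContainerUpdateResponseProto into its succeeded and failed halves via the
  // interface above.
  //
  //   static void report(
  //       org.apache.hadoop.yarn.proto.YarnServiceProtos
  //           .ContainerUpdateResponseProtoOrBuilder resp) {
  //     System.out.println(resp.getSucceededRequestsCount() + " succeeded");
  //     for (int i = 0; i < resp.getFailedRequestsCount(); i++) {
  //       System.out.println("failed: " + resp.getFailedRequests(i));
  //     }
  //   }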
  /**
   * Protobuf type {@code hadoop.yarn.ContainerUpdateResponseProto}
   */
  public static final class ContainerUpdateResponseProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.ContainerUpdateResponseProto)
      ContainerUpdateResponseProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use ContainerUpdateResponseProto.newBuilder() to construct.
    private ContainerUpdateResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private ContainerUpdateResponseProto() {
      succeededRequests_ = java.util.Collections.emptyList();
      failedRequests_ = java.util.Collections.emptyList();
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new ContainerUpdateResponseProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ContainerUpdateResponseProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ContainerUpdateResponseProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateResponseProto.Builder.class);
    }

    public static final int SUCCEEDED_REQUESTS_FIELD_NUMBER = 1;
    @SuppressWarnings("serial")
    private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto> succeededRequests_;
    /**
     * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1;</code>
     */
    @java.lang.Override
    public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto> getSucceededRequestsList() {
      return succeededRequests_;
    }
    /**
     * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1;</code>
     */
    @java.lang.Override
    public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> 
        getSucceededRequestsOrBuilderList() {
      return succeededRequests_;
    }
    /**
     * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1;</code>
     */
    @java.lang.Override
    public int getSucceededRequestsCount() {
      return succeededRequests_.size();
    }
    /**
     * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getSucceededRequests(int index) {
      return succeededRequests_.get(index);
    }
    /**
     * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getSucceededRequestsOrBuilder(
        int index) {
      return succeededRequests_.get(index);
    }

    public static final int FAILED_REQUESTS_FIELD_NUMBER = 2;
    @SuppressWarnings("serial")
    private java.util.List<org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto> failedRequests_;
    /**
     * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
     */
    @java.lang.Override
    public java.util.List<org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto> getFailedRequestsList() {
      return failedRequests_;
    }
    /**
     * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
     */
    @java.lang.Override
    public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProtoOrBuilder> 
        getFailedRequestsOrBuilderList() {
      return failedRequests_;
    }
    /**
     * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
     */
    @java.lang.Override
    public int getFailedRequestsCount() {
      return failedRequests_.size();
    }
    /**
     * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto getFailedRequests(int index) {
      return failedRequests_.get(index);
    }
    /**
     * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProtoOrBuilder getFailedRequestsOrBuilder(
        int index) {
      return failedRequests_.get(index);
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }
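
    // Unlike ContainerUpdateRequestProto, no per-element checks are generated
    // here: neither ContainerIdProto nor ContainerExceptionMapProto reaches a
    // required field, so every instance is trivially initialized.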

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      for (int i = 0; i < succeededRequests_.size(); i++) {
        output.writeMessage(1, succeededRequests_.get(i));
      }
      for (int i = 0; i < failedRequests_.size(); i++) {
        output.writeMessage(2, failedRequests_.get(i));
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      for (int i = 0; i < succeededRequests_.size(); i++) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, succeededRequests_.get(i));
      }
      for (int i = 0; i < failedRequests_.size(); i++) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(2, failedRequests_.get(i));
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateResponseProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateResponseProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateResponseProto) obj;

      if (!getSucceededRequestsList()
          .equals(other.getSucceededRequestsList())) return false;
      if (!getFailedRequestsList()
          .equals(other.getFailedRequestsList())) return false;
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (getSucceededRequestsCount() > 0) {
        hash = (37 * hash) + SUCCEEDED_REQUESTS_FIELD_NUMBER;
        hash = (53 * hash) + getSucceededRequestsList().hashCode();
      }
      if (getFailedRequestsCount() > 0) {
        hash = (37 * hash) + FAILED_REQUESTS_FIELD_NUMBER;
        hash = (53 * hash) + getFailedRequestsList().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateResponseProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateResponseProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateResponseProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateResponseProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateResponseProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateResponseProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateResponseProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateResponseProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateResponseProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.ContainerUpdateResponseProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.ContainerUpdateResponseProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateResponseProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ContainerUpdateResponseProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ContainerUpdateResponseProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateResponseProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateResponseProto.newBuilder()
      private Builder() {

      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);

      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        if (succeededRequestsBuilder_ == null) {
          succeededRequests_ = java.util.Collections.emptyList();
        } else {
          succeededRequests_ = null;
          succeededRequestsBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        if (failedRequestsBuilder_ == null) {
          failedRequests_ = java.util.Collections.emptyList();
        } else {
          failedRequests_ = null;
          failedRequestsBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000002);
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ContainerUpdateResponseProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateResponseProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateResponseProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateResponseProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateResponseProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateResponseProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateResponseProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateResponseProto(this);
        buildPartialRepeatedFields(result);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartialRepeatedFields(org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateResponseProto result) {
        if (succeededRequestsBuilder_ == null) {
          if (((bitField0_ & 0x00000001) != 0)) {
            succeededRequests_ = java.util.Collections.unmodifiableList(succeededRequests_);
            bitField0_ = (bitField0_ & ~0x00000001);
          }
          result.succeededRequests_ = succeededRequests_;
        } else {
          result.succeededRequests_ = succeededRequestsBuilder_.build();
        }
        if (failedRequestsBuilder_ == null) {
          if (((bitField0_ & 0x00000002) != 0)) {
            failedRequests_ = java.util.Collections.unmodifiableList(failedRequests_);
            bitField0_ = (bitField0_ & ~0x00000002);
          }
          result.failedRequests_ = failedRequests_;
        } else {
          result.failedRequests_ = failedRequestsBuilder_.build();
        }
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateResponseProto result) {
        // No singular fields to copy; both repeated fields are handled in
        // buildPartialRepeatedFields(result) above.
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateResponseProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateResponseProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateResponseProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateResponseProto.getDefaultInstance()) return this;
        if (succeededRequestsBuilder_ == null) {
          if (!other.succeededRequests_.isEmpty()) {
            if (succeededRequests_.isEmpty()) {
              succeededRequests_ = other.succeededRequests_;
              bitField0_ = (bitField0_ & ~0x00000001);
            } else {
              ensureSucceededRequestsIsMutable();
              succeededRequests_.addAll(other.succeededRequests_);
            }
            onChanged();
          }
        } else {
          if (!other.succeededRequests_.isEmpty()) {
            if (succeededRequestsBuilder_.isEmpty()) {
              succeededRequestsBuilder_.dispose();
              succeededRequestsBuilder_ = null;
              succeededRequests_ = other.succeededRequests_;
              bitField0_ = (bitField0_ & ~0x00000001);
              succeededRequestsBuilder_ = 
                org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                   getSucceededRequestsFieldBuilder() : null;
            } else {
              succeededRequestsBuilder_.addAllMessages(other.succeededRequests_);
            }
          }
        }
        if (failedRequestsBuilder_ == null) {
          if (!other.failedRequests_.isEmpty()) {
            if (failedRequests_.isEmpty()) {
              failedRequests_ = other.failedRequests_;
              bitField0_ = (bitField0_ & ~0x00000002);
            } else {
              ensureFailedRequestsIsMutable();
              failedRequests_.addAll(other.failedRequests_);
            }
            onChanged();
          }
        } else {
          if (!other.failedRequests_.isEmpty()) {
            if (failedRequestsBuilder_.isEmpty()) {
              failedRequestsBuilder_.dispose();
              failedRequestsBuilder_ = null;
              failedRequests_ = other.failedRequests_;
              bitField0_ = (bitField0_ & ~0x00000002);
              failedRequestsBuilder_ = 
                org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                   getFailedRequestsFieldBuilder() : null;
            } else {
              failedRequestsBuilder_.addAllMessages(other.failedRequests_);
            }
          }
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto m =
                    input.readMessage(
                        org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.PARSER,
                        extensionRegistry);
                if (succeededRequestsBuilder_ == null) {
                  ensureSucceededRequestsIsMutable();
                  succeededRequests_.add(m);
                } else {
                  succeededRequestsBuilder_.addMessage(m);
                }
                break;
              } // case 10
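              // Tag 18 == (field_number 2 << 3) | wire_type 2
              // (length-delimited), i.e. failed_requests.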
              case 18: {
                org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto m =
                    input.readMessage(
                        org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.PARSER,
                        extensionRegistry);
                if (failedRequestsBuilder_ == null) {
                  ensureFailedRequestsIsMutable();
                  failedRequests_.add(m);
                } else {
                  failedRequestsBuilder_.addMessage(m);
                }
                break;
              } // case 18
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
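      // Note on the case labels above: a protobuf tag is
      // (field_number << 3) | wire_type. Field 1 (succeeded_requests) with
      // wire type 2 (length-delimited) gives (1 << 3) | 2 = 10, and field 2
      // (failed_requests) gives (2 << 3) | 2 = 18. readTag() returning 0
      // signals end of input, which is why case 0 sets done = true.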
      private int bitField0_;

      private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto> succeededRequests_ =
        java.util.Collections.emptyList();
      private void ensureSucceededRequestsIsMutable() {
        if (!((bitField0_ & 0x00000001) != 0)) {
          succeededRequests_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto>(succeededRequests_);
          bitField0_ |= 0x00000001;
        }
      }

      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> succeededRequestsBuilder_;

      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto> getSucceededRequestsList() {
        if (succeededRequestsBuilder_ == null) {
          return java.util.Collections.unmodifiableList(succeededRequests_);
        } else {
          return succeededRequestsBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1;</code>
       */
      public int getSucceededRequestsCount() {
        if (succeededRequestsBuilder_ == null) {
          return succeededRequests_.size();
        } else {
          return succeededRequestsBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getSucceededRequests(int index) {
        if (succeededRequestsBuilder_ == null) {
          return succeededRequests_.get(index);
        } else {
          return succeededRequestsBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1;</code>
       */
      public Builder setSucceededRequests(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) {
        if (succeededRequestsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureSucceededRequestsIsMutable();
          succeededRequests_.set(index, value);
          onChanged();
        } else {
          succeededRequestsBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1;</code>
       */
      public Builder setSucceededRequests(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder builderForValue) {
        if (succeededRequestsBuilder_ == null) {
          ensureSucceededRequestsIsMutable();
          succeededRequests_.set(index, builderForValue.build());
          onChanged();
        } else {
          succeededRequestsBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1;</code>
       */
      public Builder addSucceededRequests(org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) {
        if (succeededRequestsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureSucceededRequestsIsMutable();
          succeededRequests_.add(value);
          onChanged();
        } else {
          succeededRequestsBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1;</code>
       */
      public Builder addSucceededRequests(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) {
        if (succeededRequestsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureSucceededRequestsIsMutable();
          succeededRequests_.add(index, value);
          onChanged();
        } else {
          succeededRequestsBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1;</code>
       */
      public Builder addSucceededRequests(
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder builderForValue) {
        if (succeededRequestsBuilder_ == null) {
          ensureSucceededRequestsIsMutable();
          succeededRequests_.add(builderForValue.build());
          onChanged();
        } else {
          succeededRequestsBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1;</code>
       */
      public Builder addSucceededRequests(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder builderForValue) {
        if (succeededRequestsBuilder_ == null) {
          ensureSucceededRequestsIsMutable();
          succeededRequests_.add(index, builderForValue.build());
          onChanged();
        } else {
          succeededRequestsBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1;</code>
       */
      public Builder addAllSucceededRequests(
          java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto> values) {
        if (succeededRequestsBuilder_ == null) {
          ensureSucceededRequestsIsMutable();
          org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
              values, succeededRequests_);
          onChanged();
        } else {
          succeededRequestsBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1;</code>
       */
      public Builder clearSucceededRequests() {
        if (succeededRequestsBuilder_ == null) {
          succeededRequests_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000001);
          onChanged();
        } else {
          succeededRequestsBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1;</code>
       */
      public Builder removeSucceededRequests(int index) {
        if (succeededRequestsBuilder_ == null) {
          ensureSucceededRequestsIsMutable();
          succeededRequests_.remove(index);
          onChanged();
        } else {
          succeededRequestsBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder getSucceededRequestsBuilder(
          int index) {
        return getSucceededRequestsFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getSucceededRequestsOrBuilder(
          int index) {
        if (succeededRequestsBuilder_ == null) {
          return succeededRequests_.get(index);
        } else {
          return succeededRequestsBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1;</code>
       */
      public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> 
           getSucceededRequestsOrBuilderList() {
        if (succeededRequestsBuilder_ != null) {
          return succeededRequestsBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(succeededRequests_);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder addSucceededRequestsBuilder() {
        return getSucceededRequestsFieldBuilder().addBuilder(
            org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder addSucceededRequestsBuilder(
          int index) {
        return getSucceededRequestsFieldBuilder().addBuilder(
            index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder> 
           getSucceededRequestsBuilderList() {
        return getSucceededRequestsFieldBuilder().getBuilderList();
      }
      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> 
          getSucceededRequestsFieldBuilder() {
        if (succeededRequestsBuilder_ == null) {
          succeededRequestsBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder>(
                  succeededRequests_,
                  ((bitField0_ & 0x00000001) != 0),
                  getParentForChildren(),
                  isClean());
          succeededRequests_ = null;
        }
        return succeededRequestsBuilder_;
      }
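      // The repeated field lives in exactly one of two representations: the
      // plain java.util.List until any *Builder accessor is called, and a
      // RepeatedFieldBuilderV3 afterwards (at which point the list reference
      // is nulled). Bit 0x00000001 of bitField0_ records whether the list is
      // a private mutable copy, as set by ensureSucceededRequestsIsMutable().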

      private java.util.List<org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto> failedRequests_ =
        java.util.Collections.emptyList();
      private void ensureFailedRequestsIsMutable() {
        if (!((bitField0_ & 0x00000002) != 0)) {
          failedRequests_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto>(failedRequests_);
          bitField0_ |= 0x00000002;
        }
      }

      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProtoOrBuilder> failedRequestsBuilder_;

      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto> getFailedRequestsList() {
        if (failedRequestsBuilder_ == null) {
          return java.util.Collections.unmodifiableList(failedRequests_);
        } else {
          return failedRequestsBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
       */
      public int getFailedRequestsCount() {
        if (failedRequestsBuilder_ == null) {
          return failedRequests_.size();
        } else {
          return failedRequestsBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto getFailedRequests(int index) {
        if (failedRequestsBuilder_ == null) {
          return failedRequests_.get(index);
        } else {
          return failedRequestsBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
       */
      public Builder setFailedRequests(
          int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto value) {
        if (failedRequestsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureFailedRequestsIsMutable();
          failedRequests_.set(index, value);
          onChanged();
        } else {
          failedRequestsBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
       */
      public Builder setFailedRequests(
          int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder builderForValue) {
        if (failedRequestsBuilder_ == null) {
          ensureFailedRequestsIsMutable();
          failedRequests_.set(index, builderForValue.build());
          onChanged();
        } else {
          failedRequestsBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
       */
      public Builder addFailedRequests(org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto value) {
        if (failedRequestsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureFailedRequestsIsMutable();
          failedRequests_.add(value);
          onChanged();
        } else {
          failedRequestsBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
       */
      public Builder addFailedRequests(
          int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto value) {
        if (failedRequestsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureFailedRequestsIsMutable();
          failedRequests_.add(index, value);
          onChanged();
        } else {
          failedRequestsBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
       */
      public Builder addFailedRequests(
          org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder builderForValue) {
        if (failedRequestsBuilder_ == null) {
          ensureFailedRequestsIsMutable();
          failedRequests_.add(builderForValue.build());
          onChanged();
        } else {
          failedRequestsBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
       */
      public Builder addFailedRequests(
          int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder builderForValue) {
        if (failedRequestsBuilder_ == null) {
          ensureFailedRequestsIsMutable();
          failedRequests_.add(index, builderForValue.build());
          onChanged();
        } else {
          failedRequestsBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
       */
      public Builder addAllFailedRequests(
          java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto> values) {
        if (failedRequestsBuilder_ == null) {
          ensureFailedRequestsIsMutable();
          org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
              values, failedRequests_);
          onChanged();
        } else {
          failedRequestsBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
       */
      public Builder clearFailedRequests() {
        if (failedRequestsBuilder_ == null) {
          failedRequests_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000002);
          onChanged();
        } else {
          failedRequestsBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
       */
      public Builder removeFailedRequests(int index) {
        if (failedRequestsBuilder_ == null) {
          ensureFailedRequestsIsMutable();
          failedRequests_.remove(index);
          onChanged();
        } else {
          failedRequestsBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder getFailedRequestsBuilder(
          int index) {
        return getFailedRequestsFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProtoOrBuilder getFailedRequestsOrBuilder(
          int index) {
        if (failedRequestsBuilder_ == null) {
          return failedRequests_.get(index);
        } else {
          return failedRequestsBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
       */
      public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProtoOrBuilder> 
           getFailedRequestsOrBuilderList() {
        if (failedRequestsBuilder_ != null) {
          return failedRequestsBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(failedRequests_);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder addFailedRequestsBuilder() {
        return getFailedRequestsFieldBuilder().addBuilder(
            org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder addFailedRequestsBuilder(
          int index) {
        return getFailedRequestsFieldBuilder().addBuilder(
            index, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder> 
           getFailedRequestsBuilderList() {
        return getFailedRequestsFieldBuilder().getBuilderList();
      }
      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProtoOrBuilder> 
          getFailedRequestsFieldBuilder() {
        if (failedRequestsBuilder_ == null) {
          failedRequestsBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProtoOrBuilder>(
                  failedRequests_,
                  ((bitField0_ & 0x00000002) != 0),
                  getParentForChildren(),
                  isClean());
          failedRequests_ = null;
        }
        return failedRequestsBuilder_;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.ContainerUpdateResponseProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.ContainerUpdateResponseProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateResponseProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateResponseProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateResponseProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<ContainerUpdateResponseProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<ContainerUpdateResponseProto>() {
      @java.lang.Override
      public ContainerUpdateResponseProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
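    // parsePartialFrom above returns buildPartial() rather than build():
    // validation of the message is deferred, and on failure the partially
    // read message is attached to the thrown InvalidProtocolBufferException
    // via setUnfinishedMessage so callers can inspect what was decoded.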

    public static org.apache.hadoop.thirdparty.protobuf.Parser<ContainerUpdateResponseProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<ContainerUpdateResponseProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateResponseProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
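  // Illustrative usage sketch (hand-written, not generated): a serialization
  // round trip through the ContainerUpdateResponseProto API defined above.
  // The default ContainerIdProto instance stands in for a real container id.
  private static ContainerUpdateResponseProto exampleContainerUpdateResponseRoundTrip()
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    ContainerUpdateResponseProto response = ContainerUpdateResponseProto.newBuilder()
        .addSucceededRequests(
            org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance())
        .build();
    // toByteArray() serializes via writeTo; parseFrom delegates to PARSER.
    return ContainerUpdateResponseProto.parseFrom(response.toByteArray());
  }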

  public interface GetApplicationAttemptReportRequestProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.GetApplicationAttemptReportRequestProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;</code>
     * @return Whether the applicationAttemptId field is set.
     */
    boolean hasApplicationAttemptId();
    /**
     * <code>optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;</code>
     * @return The applicationAttemptId.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto getApplicationAttemptId();
    /**
     * <code>optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder getApplicationAttemptIdOrBuilder();
  }
  /**
   * Protobuf type {@code hadoop.yarn.GetApplicationAttemptReportRequestProto}
   */
  public static final class GetApplicationAttemptReportRequestProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.GetApplicationAttemptReportRequestProto)
      GetApplicationAttemptReportRequestProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use GetApplicationAttemptReportRequestProto.newBuilder() to construct.
    private GetApplicationAttemptReportRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private GetApplicationAttemptReportRequestProto() {
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new GetApplicationAttemptReportRequestProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetApplicationAttemptReportRequestProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetApplicationAttemptReportRequestProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportRequestProto.Builder.class);
    }

    private int bitField0_;
    public static final int APPLICATION_ATTEMPT_ID_FIELD_NUMBER = 1;
    private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto applicationAttemptId_;
    /**
     * <code>optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;</code>
     * @return Whether the applicationAttemptId field is set.
     */
    @java.lang.Override
    public boolean hasApplicationAttemptId() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;</code>
     * @return The applicationAttemptId.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto getApplicationAttemptId() {
      return applicationAttemptId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.getDefaultInstance() : applicationAttemptId_;
    }
    /**
     * <code>optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder getApplicationAttemptIdOrBuilder() {
      return applicationAttemptId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.getDefaultInstance() : applicationAttemptId_;
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeMessage(1, getApplicationAttemptId());
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, getApplicationAttemptId());
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportRequestProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportRequestProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportRequestProto) obj;

      if (hasApplicationAttemptId() != other.hasApplicationAttemptId()) return false;
      if (hasApplicationAttemptId()) {
        if (!getApplicationAttemptId()
            .equals(other.getApplicationAttemptId())) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasApplicationAttemptId()) {
        hash = (37 * hash) + APPLICATION_ATTEMPT_ID_FIELD_NUMBER;
        hash = (53 * hash) + getApplicationAttemptId().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
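    // hashCode is memoized: memoizedHashCode == 0 means "not yet computed".
    // The constants (41, 19, 37, 53, 29) follow the standard generated-code
    // mixing scheme; each set field contributes its field number and its
    // value's hash as a pair, so messages that differ only in which optional
    // fields are present still hash differently.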

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportRequestProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportRequestProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportRequestProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportRequestProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportRequestProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportRequestProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportRequestProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportRequestProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportRequestProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.GetApplicationAttemptReportRequestProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.GetApplicationAttemptReportRequestProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportRequestProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetApplicationAttemptReportRequestProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetApplicationAttemptReportRequestProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportRequestProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportRequestProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
          getApplicationAttemptIdFieldBuilder();
        }
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        applicationAttemptId_ = null;
        if (applicationAttemptIdBuilder_ != null) {
          applicationAttemptIdBuilder_.dispose();
          applicationAttemptIdBuilder_ = null;
        }
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetApplicationAttemptReportRequestProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportRequestProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportRequestProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportRequestProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportRequestProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportRequestProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportRequestProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportRequestProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportRequestProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.applicationAttemptId_ = applicationAttemptIdBuilder_ == null
              ? applicationAttemptId_
              : applicationAttemptIdBuilder_.build();
          to_bitField0_ |= 0x00000001;
        }
        result.bitField0_ |= to_bitField0_;
      }
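      // buildPartial0 transfers only the has-bits actually set in the builder
      // into the message's own bitField0_, taking the field value from the
      // nested builder when one is active and from the plain field otherwise.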

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportRequestProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportRequestProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportRequestProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportRequestProto.getDefaultInstance()) return this;
        if (other.hasApplicationAttemptId()) {
          mergeApplicationAttemptId(other.getApplicationAttemptId());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                input.readMessage(
                    getApplicationAttemptIdFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

      private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto applicationAttemptId_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder> applicationAttemptIdBuilder_;
      /**
       * <code>optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;</code>
       * @return Whether the applicationAttemptId field is set.
       */
      public boolean hasApplicationAttemptId() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;</code>
       * @return The applicationAttemptId.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto getApplicationAttemptId() {
        if (applicationAttemptIdBuilder_ == null) {
          return applicationAttemptId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.getDefaultInstance() : applicationAttemptId_;
        } else {
          return applicationAttemptIdBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;</code>
       */
      public Builder setApplicationAttemptId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto value) {
        if (applicationAttemptIdBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          applicationAttemptId_ = value;
        } else {
          applicationAttemptIdBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;</code>
       */
      public Builder setApplicationAttemptId(
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder builderForValue) {
        if (applicationAttemptIdBuilder_ == null) {
          applicationAttemptId_ = builderForValue.build();
        } else {
          applicationAttemptIdBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;</code>
       */
      public Builder mergeApplicationAttemptId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto value) {
        if (applicationAttemptIdBuilder_ == null) {
          if (((bitField0_ & 0x00000001) != 0) &&
            applicationAttemptId_ != null &&
            applicationAttemptId_ != org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.getDefaultInstance()) {
            getApplicationAttemptIdBuilder().mergeFrom(value);
          } else {
            applicationAttemptId_ = value;
          }
        } else {
          applicationAttemptIdBuilder_.mergeFrom(value);
        }
        if (applicationAttemptId_ != null) {
          bitField0_ |= 0x00000001;
          onChanged();
        }
        return this;
      }
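      // Merge semantics above: if the field is unset, null, or still the
      // default instance, the incoming value simply replaces it; otherwise
      // the two messages are merged field-by-field via the nested builder.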
      /**
       * <code>optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;</code>
       */
      public Builder clearApplicationAttemptId() {
        bitField0_ = (bitField0_ & ~0x00000001);
        applicationAttemptId_ = null;
        if (applicationAttemptIdBuilder_ != null) {
          applicationAttemptIdBuilder_.dispose();
          applicationAttemptIdBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder getApplicationAttemptIdBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getApplicationAttemptIdFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder getApplicationAttemptIdOrBuilder() {
        if (applicationAttemptIdBuilder_ != null) {
          return applicationAttemptIdBuilder_.getMessageOrBuilder();
        } else {
          return applicationAttemptId_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.getDefaultInstance() : applicationAttemptId_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder> 
          getApplicationAttemptIdFieldBuilder() {
        if (applicationAttemptIdBuilder_ == null) {
          applicationAttemptIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder>(
                  getApplicationAttemptId(),
                  getParentForChildren(),
                  isClean());
          applicationAttemptId_ = null;
        }
        return applicationAttemptIdBuilder_;
      }
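      // As with repeated fields, exactly one representation is live at a
      // time: once the SingleFieldBuilderV3 is created (seeded with the
      // current message via getApplicationAttemptId()), the plain
      // applicationAttemptId_ field is nulled and all access goes through
      // the builder.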
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.GetApplicationAttemptReportRequestProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.GetApplicationAttemptReportRequestProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportRequestProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportRequestProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportRequestProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<GetApplicationAttemptReportRequestProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<GetApplicationAttemptReportRequestProto>() {
      @java.lang.Override
      public GetApplicationAttemptReportRequestProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<GetApplicationAttemptReportRequestProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<GetApplicationAttemptReportRequestProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportRequestProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
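  // Illustrative sketch (hand-written, not generated): constructing the
  // request message defined above. The default ApplicationAttemptIdProto
  // stands in for a real attempt id.
  private static GetApplicationAttemptReportRequestProto exampleAttemptReportRequest() {
    return GetApplicationAttemptReportRequestProto.newBuilder()
        .setApplicationAttemptId(
            org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.getDefaultInstance())
        .build();
  }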

  public interface GetApplicationAttemptReportResponseProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.GetApplicationAttemptReportResponseProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>optional .hadoop.yarn.ApplicationAttemptReportProto application_attempt_report = 1;</code>
     * @return Whether the applicationAttemptReport field is set.
     */
    boolean hasApplicationAttemptReport();
    /**
     * <code>optional .hadoop.yarn.ApplicationAttemptReportProto application_attempt_report = 1;</code>
     * @return The applicationAttemptReport.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto getApplicationAttemptReport();
    /**
     * <code>optional .hadoop.yarn.ApplicationAttemptReportProto application_attempt_report = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProtoOrBuilder getApplicationAttemptReportOrBuilder();
  }
  /**
   * Protobuf type {@code hadoop.yarn.GetApplicationAttemptReportResponseProto}
   */
  public static final class GetApplicationAttemptReportResponseProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.GetApplicationAttemptReportResponseProto)
      GetApplicationAttemptReportResponseProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use GetApplicationAttemptReportResponseProto.newBuilder() to construct.
    private GetApplicationAttemptReportResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private GetApplicationAttemptReportResponseProto() {
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new GetApplicationAttemptReportResponseProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetApplicationAttemptReportResponseProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetApplicationAttemptReportResponseProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportResponseProto.Builder.class);
    }

    private int bitField0_;
    public static final int APPLICATION_ATTEMPT_REPORT_FIELD_NUMBER = 1;
    private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto applicationAttemptReport_;
    /**
     * <code>optional .hadoop.yarn.ApplicationAttemptReportProto application_attempt_report = 1;</code>
     * @return Whether the applicationAttemptReport field is set.
     */
    @java.lang.Override
    public boolean hasApplicationAttemptReport() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.ApplicationAttemptReportProto application_attempt_report = 1;</code>
     * @return The applicationAttemptReport.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto getApplicationAttemptReport() {
      return applicationAttemptReport_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto.getDefaultInstance() : applicationAttemptReport_;
    }
    /**
     * <code>optional .hadoop.yarn.ApplicationAttemptReportProto application_attempt_report = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProtoOrBuilder getApplicationAttemptReportOrBuilder() {
      return applicationAttemptReport_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto.getDefaultInstance() : applicationAttemptReport_;
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeMessage(1, getApplicationAttemptReport());
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, getApplicationAttemptReport());
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportResponseProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportResponseProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportResponseProto) obj;

      if (hasApplicationAttemptReport() != other.hasApplicationAttemptReport()) return false;
      if (hasApplicationAttemptReport()) {
        if (!getApplicationAttemptReport()
            .equals(other.getApplicationAttemptReport())) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasApplicationAttemptReport()) {
        hash = (37 * hash) + APPLICATION_ATTEMPT_REPORT_FIELD_NUMBER;
        hash = (53 * hash) + getApplicationAttemptReport().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportResponseProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportResponseProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportResponseProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportResponseProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportResponseProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportResponseProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportResponseProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportResponseProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
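
    // Delimited I/O note: parseDelimitedFrom pairs with the inherited
    // writeDelimitedTo, letting several messages share one stream, each
    // prefixed with its varint-encoded length.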
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }
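
    // A minimal parse sketch (the 'bytes' variable is illustrative): rebuild
    // the message from previously serialized data, then check presence before
    // reading the optional field.
    //
    //   GetApplicationAttemptReportResponseProto parsed =
    //       GetApplicationAttemptReportResponseProto.parseFrom(bytes);
    //   if (parsed.hasApplicationAttemptReport()) {
    //     org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto
    //         report = parsed.getApplicationAttemptReport();
    //   }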

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportResponseProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.GetApplicationAttemptReportResponseProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.GetApplicationAttemptReportResponseProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportResponseProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetApplicationAttemptReportResponseProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetApplicationAttemptReportResponseProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportResponseProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportResponseProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
          getApplicationAttemptReportFieldBuilder();
        }
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        applicationAttemptReport_ = null;
        if (applicationAttemptReportBuilder_ != null) {
          applicationAttemptReportBuilder_.dispose();
          applicationAttemptReportBuilder_ = null;
        }
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetApplicationAttemptReportResponseProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportResponseProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportResponseProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportResponseProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportResponseProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportResponseProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportResponseProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportResponseProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportResponseProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.applicationAttemptReport_ = applicationAttemptReportBuilder_ == null
              ? applicationAttemptReport_
              : applicationAttemptReportBuilder_.build();
          to_bitField0_ |= 0x00000001;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportResponseProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportResponseProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportResponseProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportResponseProto.getDefaultInstance()) return this;
        if (other.hasApplicationAttemptReport()) {
          mergeApplicationAttemptReport(other.getApplicationAttemptReport());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
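                // tag 10 == (field number 1 << 3) | wire type 2
                // (length-delimited message)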
                input.readMessage(
                    getApplicationAttemptReportFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

      private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto applicationAttemptReport_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProtoOrBuilder> applicationAttemptReportBuilder_;
      /**
       * <code>optional .hadoop.yarn.ApplicationAttemptReportProto application_attempt_report = 1;</code>
       * @return Whether the applicationAttemptReport field is set.
       */
      public boolean hasApplicationAttemptReport() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationAttemptReportProto application_attempt_report = 1;</code>
       * @return The applicationAttemptReport.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto getApplicationAttemptReport() {
        if (applicationAttemptReportBuilder_ == null) {
          return applicationAttemptReport_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto.getDefaultInstance() : applicationAttemptReport_;
        } else {
          return applicationAttemptReportBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationAttemptReportProto application_attempt_report = 1;</code>
       */
      public Builder setApplicationAttemptReport(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto value) {
        if (applicationAttemptReportBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          applicationAttemptReport_ = value;
        } else {
          applicationAttemptReportBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationAttemptReportProto application_attempt_report = 1;</code>
       */
      public Builder setApplicationAttemptReport(
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto.Builder builderForValue) {
        if (applicationAttemptReportBuilder_ == null) {
          applicationAttemptReport_ = builderForValue.build();
        } else {
          applicationAttemptReportBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationAttemptReportProto application_attempt_report = 1;</code>
       */
      public Builder mergeApplicationAttemptReport(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto value) {
        if (applicationAttemptReportBuilder_ == null) {
          if (((bitField0_ & 0x00000001) != 0) &&
            applicationAttemptReport_ != null &&
            applicationAttemptReport_ != org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto.getDefaultInstance()) {
            getApplicationAttemptReportBuilder().mergeFrom(value);
          } else {
            applicationAttemptReport_ = value;
          }
        } else {
          applicationAttemptReportBuilder_.mergeFrom(value);
        }
        if (applicationAttemptReport_ != null) {
          bitField0_ |= 0x00000001;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationAttemptReportProto application_attempt_report = 1;</code>
       */
      public Builder clearApplicationAttemptReport() {
        bitField0_ = (bitField0_ & ~0x00000001);
        applicationAttemptReport_ = null;
        if (applicationAttemptReportBuilder_ != null) {
          applicationAttemptReportBuilder_.dispose();
          applicationAttemptReportBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationAttemptReportProto application_attempt_report = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto.Builder getApplicationAttemptReportBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getApplicationAttemptReportFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationAttemptReportProto application_attempt_report = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProtoOrBuilder getApplicationAttemptReportOrBuilder() {
        if (applicationAttemptReportBuilder_ != null) {
          return applicationAttemptReportBuilder_.getMessageOrBuilder();
        } else {
          return applicationAttemptReport_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto.getDefaultInstance() : applicationAttemptReport_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationAttemptReportProto application_attempt_report = 1;</code>
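       * <p>Created lazily; once built, the plain message reference is cleared
       * and this builder becomes the single source of truth for the field.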
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProtoOrBuilder> 
          getApplicationAttemptReportFieldBuilder() {
        if (applicationAttemptReportBuilder_ == null) {
          applicationAttemptReportBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProtoOrBuilder>(
                  getApplicationAttemptReport(),
                  getParentForChildren(),
                  isClean());
          applicationAttemptReport_ = null;
        }
        return applicationAttemptReportBuilder_;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.GetApplicationAttemptReportResponseProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.GetApplicationAttemptReportResponseProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportResponseProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportResponseProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportResponseProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    // Deprecated in generated code: prefer parser() or getParserForType()
    // over direct use of this field.
    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<GetApplicationAttemptReportResponseProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<GetApplicationAttemptReportResponseProto>() {
      @java.lang.Override
      public GetApplicationAttemptReportResponseProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<GetApplicationAttemptReportResponseProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<GetApplicationAttemptReportResponseProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportResponseProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }

  public interface GetApplicationAttemptsRequestProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.GetApplicationAttemptsRequestProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>optional .hadoop.yarn.ApplicationIdProto application_id = 1;</code>
     * @return Whether the applicationId field is set.
     */
    boolean hasApplicationId();
    /**
     * <code>optional .hadoop.yarn.ApplicationIdProto application_id = 1;</code>
     * @return The applicationId.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getApplicationId();
    /**
     * <code>optional .hadoop.yarn.ApplicationIdProto application_id = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getApplicationIdOrBuilder();
  }
  /**
   * Protobuf type {@code hadoop.yarn.GetApplicationAttemptsRequestProto}
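   *
   * <p>A minimal build sketch (field values are illustrative;
   * {@code ApplicationIdProto} is defined in {@code YarnProtos}):
   * <pre>{@code
   * GetApplicationAttemptsRequestProto req =
   *     GetApplicationAttemptsRequestProto.newBuilder()
   *         .setApplicationId(
   *             org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto
   *                 .newBuilder()
   *                 .setClusterTimestamp(0L)
   *                 .setId(1)
   *                 .build())
   *         .build();
   * }</pre>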
   */
  public static final class GetApplicationAttemptsRequestProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.GetApplicationAttemptsRequestProto)
      GetApplicationAttemptsRequestProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use GetApplicationAttemptsRequestProto.newBuilder() to construct.
    private GetApplicationAttemptsRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private GetApplicationAttemptsRequestProto() {
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new GetApplicationAttemptsRequestProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetApplicationAttemptsRequestProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetApplicationAttemptsRequestProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsRequestProto.Builder.class);
    }

    private int bitField0_;
    public static final int APPLICATION_ID_FIELD_NUMBER = 1;
    private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto applicationId_;
    /**
     * <code>optional .hadoop.yarn.ApplicationIdProto application_id = 1;</code>
     * @return Whether the applicationId field is set.
     */
    @java.lang.Override
    public boolean hasApplicationId() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.ApplicationIdProto application_id = 1;</code>
     * @return The applicationId.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getApplicationId() {
      return applicationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_;
    }
    /**
     * <code>optional .hadoop.yarn.ApplicationIdProto application_id = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getApplicationIdOrBuilder() {
      return applicationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_;
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeMessage(1, getApplicationId());
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, getApplicationId());
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsRequestProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsRequestProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsRequestProto) obj;

      if (hasApplicationId() != other.hasApplicationId()) return false;
      if (hasApplicationId()) {
        if (!getApplicationId()
            .equals(other.getApplicationId())) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasApplicationId()) {
        hash = (37 * hash) + APPLICATION_ID_FIELD_NUMBER;
        hash = (53 * hash) + getApplicationId().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsRequestProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsRequestProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsRequestProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsRequestProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsRequestProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsRequestProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsRequestProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsRequestProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsRequestProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.GetApplicationAttemptsRequestProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.GetApplicationAttemptsRequestProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsRequestProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetApplicationAttemptsRequestProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetApplicationAttemptsRequestProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsRequestProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsRequestProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
          getApplicationIdFieldBuilder();
        }
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        applicationId_ = null;
        if (applicationIdBuilder_ != null) {
          applicationIdBuilder_.dispose();
          applicationIdBuilder_ = null;
        }
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetApplicationAttemptsRequestProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsRequestProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsRequestProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsRequestProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsRequestProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsRequestProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsRequestProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsRequestProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsRequestProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.applicationId_ = applicationIdBuilder_ == null
              ? applicationId_
              : applicationIdBuilder_.build();
          to_bitField0_ |= 0x00000001;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsRequestProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsRequestProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsRequestProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsRequestProto.getDefaultInstance()) return this;
        if (other.hasApplicationId()) {
          mergeApplicationId(other.getApplicationId());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                input.readMessage(
                    getApplicationIdFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

      private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto applicationId_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder> applicationIdBuilder_;
      /**
       * <code>optional .hadoop.yarn.ApplicationIdProto application_id = 1;</code>
       * @return Whether the applicationId field is set.
       */
      public boolean hasApplicationId() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationIdProto application_id = 1;</code>
       * @return The applicationId.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getApplicationId() {
        if (applicationIdBuilder_ == null) {
          return applicationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_;
        } else {
          return applicationIdBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationIdProto application_id = 1;</code>
       */
      public Builder setApplicationId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto value) {
        if (applicationIdBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          applicationId_ = value;
        } else {
          applicationIdBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationIdProto application_id = 1;</code>
       */
      public Builder setApplicationId(
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder builderForValue) {
        if (applicationIdBuilder_ == null) {
          applicationId_ = builderForValue.build();
        } else {
          applicationIdBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationIdProto application_id = 1;</code>
       */
      public Builder mergeApplicationId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto value) {
        if (applicationIdBuilder_ == null) {
          if (((bitField0_ & 0x00000001) != 0) &&
            applicationId_ != null &&
            applicationId_ != org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance()) {
            getApplicationIdBuilder().mergeFrom(value);
          } else {
            applicationId_ = value;
          }
        } else {
          applicationIdBuilder_.mergeFrom(value);
        }
        if (applicationId_ != null) {
          bitField0_ |= 0x00000001;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationIdProto application_id = 1;</code>
       */
      public Builder clearApplicationId() {
        bitField0_ = (bitField0_ & ~0x00000001);
        applicationId_ = null;
        if (applicationIdBuilder_ != null) {
          applicationIdBuilder_.dispose();
          applicationIdBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationIdProto application_id = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder getApplicationIdBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getApplicationIdFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationIdProto application_id = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getApplicationIdOrBuilder() {
        if (applicationIdBuilder_ != null) {
          return applicationIdBuilder_.getMessageOrBuilder();
        } else {
          return applicationId_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationIdProto application_id = 1;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder> 
          getApplicationIdFieldBuilder() {
        if (applicationIdBuilder_ == null) {
          applicationIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder>(
                  getApplicationId(),
                  getParentForChildren(),
                  isClean());
          applicationId_ = null;
        }
        return applicationIdBuilder_;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.GetApplicationAttemptsRequestProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.GetApplicationAttemptsRequestProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsRequestProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsRequestProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsRequestProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<GetApplicationAttemptsRequestProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<GetApplicationAttemptsRequestProto>() {
      @java.lang.Override
      public GetApplicationAttemptsRequestProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<GetApplicationAttemptsRequestProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<GetApplicationAttemptsRequestProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsRequestProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }

  public interface GetApplicationAttemptsResponseProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.GetApplicationAttemptsResponseProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>repeated .hadoop.yarn.ApplicationAttemptReportProto application_attempts = 1;</code>
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto> 
        getApplicationAttemptsList();
    /**
     * <code>repeated .hadoop.yarn.ApplicationAttemptReportProto application_attempts = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto getApplicationAttempts(int index);
    /**
     * <code>repeated .hadoop.yarn.ApplicationAttemptReportProto application_attempts = 1;</code>
     */
    int getApplicationAttemptsCount();
    /**
     * <code>repeated .hadoop.yarn.ApplicationAttemptReportProto application_attempts = 1;</code>
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProtoOrBuilder> 
        getApplicationAttemptsOrBuilderList();
    /**
     * <code>repeated .hadoop.yarn.ApplicationAttemptReportProto application_attempts = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProtoOrBuilder getApplicationAttemptsOrBuilder(
        int index);
  }
  /**
   * Protobuf type {@code hadoop.yarn.GetApplicationAttemptsResponseProto}
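   *
   * <p>A minimal read sketch over the repeated field (assuming {@code resp}
   * is a parsed instance):
   * <pre>{@code
   * for (org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto
   *     attempt : resp.getApplicationAttemptsList()) {
   *   // inspect each attempt report
   * }
   * int count = resp.getApplicationAttemptsCount();
   * }</pre>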
   */
  public static final class GetApplicationAttemptsResponseProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.GetApplicationAttemptsResponseProto)
      GetApplicationAttemptsResponseProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use GetApplicationAttemptsResponseProto.newBuilder() to construct.
    private GetApplicationAttemptsResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private GetApplicationAttemptsResponseProto() {
      applicationAttempts_ = java.util.Collections.emptyList();
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new GetApplicationAttemptsResponseProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetApplicationAttemptsResponseProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetApplicationAttemptsResponseProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsResponseProto.Builder.class);
    }

    public static final int APPLICATION_ATTEMPTS_FIELD_NUMBER = 1;
    @SuppressWarnings("serial")
    private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto> applicationAttempts_;
    /**
     * <code>repeated .hadoop.yarn.ApplicationAttemptReportProto application_attempts = 1;</code>
     */
    @java.lang.Override
    public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto> getApplicationAttemptsList() {
      return applicationAttempts_;
    }
    /**
     * <code>repeated .hadoop.yarn.ApplicationAttemptReportProto application_attempts = 1;</code>
     */
    @java.lang.Override
    public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProtoOrBuilder> 
        getApplicationAttemptsOrBuilderList() {
      return applicationAttempts_;
    }
    /**
     * <code>repeated .hadoop.yarn.ApplicationAttemptReportProto application_attempts = 1;</code>
     */
    @java.lang.Override
    public int getApplicationAttemptsCount() {
      return applicationAttempts_.size();
    }
    /**
     * <code>repeated .hadoop.yarn.ApplicationAttemptReportProto application_attempts = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto getApplicationAttempts(int index) {
      return applicationAttempts_.get(index);
    }
    /**
     * <code>repeated .hadoop.yarn.ApplicationAttemptReportProto application_attempts = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProtoOrBuilder getApplicationAttemptsOrBuilder(
        int index) {
      return applicationAttempts_.get(index);
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      for (int i = 0; i < applicationAttempts_.size(); i++) {
        output.writeMessage(1, applicationAttempts_.get(i));
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      for (int i = 0; i < applicationAttempts_.size(); i++) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, applicationAttempts_.get(i));
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsResponseProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsResponseProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsResponseProto) obj;

      if (!getApplicationAttemptsList()
          .equals(other.getApplicationAttemptsList())) return false;
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (getApplicationAttemptsCount() > 0) {
        hash = (37 * hash) + APPLICATION_ATTEMPTS_FIELD_NUMBER;
        hash = (53 * hash) + getApplicationAttemptsList().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsResponseProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsResponseProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsResponseProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsResponseProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsResponseProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsResponseProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsResponseProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsResponseProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }
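
    /*
     * Editorial sketch (not generated output): the parseFrom overloads above
     * invert the standard Message serializers, giving a byte-level round
     * trip. The helper name is hypothetical.
     *
     *   static GetApplicationAttemptsResponseProto roundTrip(
     *       GetApplicationAttemptsResponseProto original)
     *       throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
     *     byte[] wire = original.toByteArray();                         // serialize
     *     return GetApplicationAttemptsResponseProto.parseFrom(wire);   // equals(original)
     *   }
     */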

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsResponseProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
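
    /*
     * Editorial sketch (not generated output): responses are normally
     * assembled through the Builder defined below. The report value used
     * here is a hypothetical placeholder.
     *
     *   org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto report =
     *       org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto.getDefaultInstance();
     *   GetApplicationAttemptsResponseProto response =
     *       GetApplicationAttemptsResponseProto.newBuilder()
     *           .addApplicationAttempts(report)
     *           .build();
     */
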
    /**
     * Protobuf type {@code hadoop.yarn.GetApplicationAttemptsResponseProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.GetApplicationAttemptsResponseProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsResponseProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetApplicationAttemptsResponseProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetApplicationAttemptsResponseProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsResponseProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsResponseProto.newBuilder()
      private Builder() {
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        if (applicationAttemptsBuilder_ == null) {
          applicationAttempts_ = java.util.Collections.emptyList();
        } else {
          applicationAttempts_ = null;
          applicationAttemptsBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetApplicationAttemptsResponseProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsResponseProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsResponseProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsResponseProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsResponseProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsResponseProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsResponseProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsResponseProto(this);
        buildPartialRepeatedFields(result);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartialRepeatedFields(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsResponseProto result) {
        if (applicationAttemptsBuilder_ == null) {
          if (((bitField0_ & 0x00000001) != 0)) {
            applicationAttempts_ = java.util.Collections.unmodifiableList(applicationAttempts_);
            bitField0_ = (bitField0_ & ~0x00000001);
          }
          result.applicationAttempts_ = applicationAttempts_;
        } else {
          result.applicationAttempts_ = applicationAttemptsBuilder_.build();
        }
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsResponseProto result) {
        // This message has no singular fields to copy; the local below is
        // unused generator residue kept for parity with other buildPartial0
        // bodies.
        int from_bitField0_ = bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsResponseProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsResponseProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsResponseProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsResponseProto.getDefaultInstance()) return this;
        if (applicationAttemptsBuilder_ == null) {
          if (!other.applicationAttempts_.isEmpty()) {
            if (applicationAttempts_.isEmpty()) {
              applicationAttempts_ = other.applicationAttempts_;
              bitField0_ = (bitField0_ & ~0x00000001);
            } else {
              ensureApplicationAttemptsIsMutable();
              applicationAttempts_.addAll(other.applicationAttempts_);
            }
            onChanged();
          }
        } else {
          if (!other.applicationAttempts_.isEmpty()) {
            if (applicationAttemptsBuilder_.isEmpty()) {
              applicationAttemptsBuilder_.dispose();
              applicationAttemptsBuilder_ = null;
              applicationAttempts_ = other.applicationAttempts_;
              bitField0_ = (bitField0_ & ~0x00000001);
              applicationAttemptsBuilder_ = 
                org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                   getApplicationAttemptsFieldBuilder() : null;
            } else {
              applicationAttemptsBuilder_.addAllMessages(other.applicationAttempts_);
            }
          }
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
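              // tag 10 = (field_number 1 << 3) | wire_type 2
              // (length-delimited): one application_attempts element.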
              case 10: {
                org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto m =
                    input.readMessage(
                        org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto.PARSER,
                        extensionRegistry);
                if (applicationAttemptsBuilder_ == null) {
                  ensureApplicationAttemptsIsMutable();
                  applicationAttempts_.add(m);
                } else {
                  applicationAttemptsBuilder_.addMessage(m);
                }
                break;
              } // case 10
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

      private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto> applicationAttempts_ =
        java.util.Collections.emptyList();
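      // Editorial note: the list starts as the shared immutable emptyList();
      // bit 0x00000001 of bitField0_ records whether this Builder owns a
      // private mutable copy, which ensureApplicationAttemptsIsMutable()
      // below creates on first write.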
      private void ensureApplicationAttemptsIsMutable() {
        if (!((bitField0_ & 0x00000001) != 0)) {
          applicationAttempts_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto>(applicationAttempts_);
          bitField0_ |= 0x00000001;
        }
      }

      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProtoOrBuilder> applicationAttemptsBuilder_;

      /**
       * <code>repeated .hadoop.yarn.ApplicationAttemptReportProto application_attempts = 1;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto> getApplicationAttemptsList() {
        if (applicationAttemptsBuilder_ == null) {
          return java.util.Collections.unmodifiableList(applicationAttempts_);
        } else {
          return applicationAttemptsBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationAttemptReportProto application_attempts = 1;</code>
       */
      public int getApplicationAttemptsCount() {
        if (applicationAttemptsBuilder_ == null) {
          return applicationAttempts_.size();
        } else {
          return applicationAttemptsBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationAttemptReportProto application_attempts = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto getApplicationAttempts(int index) {
        if (applicationAttemptsBuilder_ == null) {
          return applicationAttempts_.get(index);
        } else {
          return applicationAttemptsBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationAttemptReportProto application_attempts = 1;</code>
       */
      public Builder setApplicationAttempts(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto value) {
        if (applicationAttemptsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureApplicationAttemptsIsMutable();
          applicationAttempts_.set(index, value);
          onChanged();
        } else {
          applicationAttemptsBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationAttemptReportProto application_attempts = 1;</code>
       */
      public Builder setApplicationAttempts(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto.Builder builderForValue) {
        if (applicationAttemptsBuilder_ == null) {
          ensureApplicationAttemptsIsMutable();
          applicationAttempts_.set(index, builderForValue.build());
          onChanged();
        } else {
          applicationAttemptsBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationAttemptReportProto application_attempts = 1;</code>
       */
      public Builder addApplicationAttempts(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto value) {
        if (applicationAttemptsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureApplicationAttemptsIsMutable();
          applicationAttempts_.add(value);
          onChanged();
        } else {
          applicationAttemptsBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationAttemptReportProto application_attempts = 1;</code>
       */
      public Builder addApplicationAttempts(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto value) {
        if (applicationAttemptsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureApplicationAttemptsIsMutable();
          applicationAttempts_.add(index, value);
          onChanged();
        } else {
          applicationAttemptsBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationAttemptReportProto application_attempts = 1;</code>
       */
      public Builder addApplicationAttempts(
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto.Builder builderForValue) {
        if (applicationAttemptsBuilder_ == null) {
          ensureApplicationAttemptsIsMutable();
          applicationAttempts_.add(builderForValue.build());
          onChanged();
        } else {
          applicationAttemptsBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationAttemptReportProto application_attempts = 1;</code>
       */
      public Builder addApplicationAttempts(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto.Builder builderForValue) {
        if (applicationAttemptsBuilder_ == null) {
          ensureApplicationAttemptsIsMutable();
          applicationAttempts_.add(index, builderForValue.build());
          onChanged();
        } else {
          applicationAttemptsBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationAttemptReportProto application_attempts = 1;</code>
       */
      public Builder addAllApplicationAttempts(
          java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto> values) {
        if (applicationAttemptsBuilder_ == null) {
          ensureApplicationAttemptsIsMutable();
          org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
              values, applicationAttempts_);
          onChanged();
        } else {
          applicationAttemptsBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationAttemptReportProto application_attempts = 1;</code>
       */
      public Builder clearApplicationAttempts() {
        if (applicationAttemptsBuilder_ == null) {
          applicationAttempts_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000001);
          onChanged();
        } else {
          applicationAttemptsBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationAttemptReportProto application_attempts = 1;</code>
       */
      public Builder removeApplicationAttempts(int index) {
        if (applicationAttemptsBuilder_ == null) {
          ensureApplicationAttemptsIsMutable();
          applicationAttempts_.remove(index);
          onChanged();
        } else {
          applicationAttemptsBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationAttemptReportProto application_attempts = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto.Builder getApplicationAttemptsBuilder(
          int index) {
        return getApplicationAttemptsFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationAttemptReportProto application_attempts = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProtoOrBuilder getApplicationAttemptsOrBuilder(
          int index) {
        if (applicationAttemptsBuilder_ == null) {
          return applicationAttempts_.get(index);
        } else {
          return applicationAttemptsBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationAttemptReportProto application_attempts = 1;</code>
       */
      public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProtoOrBuilder> 
           getApplicationAttemptsOrBuilderList() {
        if (applicationAttemptsBuilder_ != null) {
          return applicationAttemptsBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(applicationAttempts_);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationAttemptReportProto application_attempts = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto.Builder addApplicationAttemptsBuilder() {
        return getApplicationAttemptsFieldBuilder().addBuilder(
            org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationAttemptReportProto application_attempts = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto.Builder addApplicationAttemptsBuilder(
          int index) {
        return getApplicationAttemptsFieldBuilder().addBuilder(
            index, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationAttemptReportProto application_attempts = 1;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto.Builder> 
           getApplicationAttemptsBuilderList() {
        return getApplicationAttemptsFieldBuilder().getBuilderList();
      }
      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProtoOrBuilder> 
          getApplicationAttemptsFieldBuilder() {
        if (applicationAttemptsBuilder_ == null) {
          applicationAttemptsBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProtoOrBuilder>(
                  applicationAttempts_,
                  ((bitField0_ & 0x00000001) != 0),
                  getParentForChildren(),
                  isClean());
          applicationAttempts_ = null;
        }
        return applicationAttemptsBuilder_;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.GetApplicationAttemptsResponseProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.GetApplicationAttemptsResponseProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsResponseProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsResponseProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsResponseProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<GetApplicationAttemptsResponseProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<GetApplicationAttemptsResponseProto>() {
      @java.lang.Override
      public GetApplicationAttemptsResponseProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

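    // Editorial note: direct use of the PARSER field above is deprecated in
    // the generated API; callers should obtain the parser via parser() or
    // getParserForType() below instead.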
    public static org.apache.hadoop.thirdparty.protobuf.Parser<GetApplicationAttemptsResponseProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<GetApplicationAttemptsResponseProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsResponseProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }

  public interface GetContainerReportRequestProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.GetContainerReportRequestProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
     * @return Whether the containerId field is set.
     */
    boolean hasContainerId();
    /**
     * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
     * @return The containerId.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getContainerId();
    /**
     * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getContainerIdOrBuilder();
  }
  /**
   * Protobuf type {@code hadoop.yarn.GetContainerReportRequestProto}
   */
  public static final class GetContainerReportRequestProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.GetContainerReportRequestProto)
      GetContainerReportRequestProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use GetContainerReportRequestProto.newBuilder() to construct.
    private GetContainerReportRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private GetContainerReportRequestProto() {
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new GetContainerReportRequestProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetContainerReportRequestProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetContainerReportRequestProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportRequestProto.Builder.class);
    }

    private int bitField0_;
    public static final int CONTAINER_ID_FIELD_NUMBER = 1;
    private org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto containerId_;
    /**
     * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
     * @return Whether the containerId field is set.
     */
    @java.lang.Override
    public boolean hasContainerId() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
     * @return The containerId.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getContainerId() {
      return containerId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : containerId_;
    }
    /**
     * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getContainerIdOrBuilder() {
      return containerId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : containerId_;
    }
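
    /*
     * Editorial sketch (not generated output): optional submessage fields
     * carry an explicit presence bit, so callers should test hasContainerId()
     * before reading; getContainerId() on an unset field returns the default
     * instance rather than null. `request` is a hypothetical instance.
     *
     *   if (request.hasContainerId()) {
     *     org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto id =
     *         request.getContainerId();
     *     // ... use id ...
     *   }
     */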

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeMessage(1, getContainerId());
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, getContainerId());
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportRequestProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportRequestProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportRequestProto) obj;

      if (hasContainerId() != other.hasContainerId()) return false;
      if (hasContainerId()) {
        if (!getContainerId()
            .equals(other.getContainerId())) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasContainerId()) {
        hash = (37 * hash) + CONTAINER_ID_FIELD_NUMBER;
        hash = (53 * hash) + getContainerId().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportRequestProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportRequestProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportRequestProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportRequestProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportRequestProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportRequestProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportRequestProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportRequestProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportRequestProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
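
    /*
     * Editorial sketch (not generated output): assembling a request through
     * the Builder defined below. The container id value is a hypothetical
     * placeholder.
     *
     *   GetContainerReportRequestProto request =
     *       GetContainerReportRequestProto.newBuilder()
     *           .setContainerId(
     *               org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance())
     *           .build();
     */
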
    /**
     * Protobuf type {@code hadoop.yarn.GetContainerReportRequestProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.GetContainerReportRequestProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportRequestProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetContainerReportRequestProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetContainerReportRequestProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportRequestProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportRequestProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
          getContainerIdFieldBuilder();
        }
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        containerId_ = null;
        if (containerIdBuilder_ != null) {
          containerIdBuilder_.dispose();
          containerIdBuilder_ = null;
        }
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetContainerReportRequestProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportRequestProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportRequestProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportRequestProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportRequestProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportRequestProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportRequestProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportRequestProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportRequestProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.containerId_ = containerIdBuilder_ == null
              ? containerId_
              : containerIdBuilder_.build();
          to_bitField0_ |= 0x00000001;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportRequestProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportRequestProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportRequestProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportRequestProto.getDefaultInstance()) return this;
        if (other.hasContainerId()) {
          mergeContainerId(other.getContainerId());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
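              // tag 10 = (field_number 1 << 3) | wire_type 2
              // (length-delimited): the container_id submessage.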
              case 10: {
                input.readMessage(
                    getContainerIdFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

      private org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto containerId_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> containerIdBuilder_;
      /**
       * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       * @return Whether the containerId field is set.
       */
      public boolean hasContainerId() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       * @return The containerId.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getContainerId() {
        if (containerIdBuilder_ == null) {
          return containerId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : containerId_;
        } else {
          return containerIdBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public Builder setContainerId(org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) {
        if (containerIdBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          containerId_ = value;
        } else {
          containerIdBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public Builder setContainerId(
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder builderForValue) {
        if (containerIdBuilder_ == null) {
          containerId_ = builderForValue.build();
        } else {
          containerIdBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public Builder mergeContainerId(org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) {
        if (containerIdBuilder_ == null) {
          if (((bitField0_ & 0x00000001) != 0) &&
            containerId_ != null &&
            containerId_ != org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance()) {
            getContainerIdBuilder().mergeFrom(value);
          } else {
            containerId_ = value;
          }
        } else {
          containerIdBuilder_.mergeFrom(value);
        }
        if (containerId_ != null) {
          bitField0_ |= 0x00000001;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public Builder clearContainerId() {
        bitField0_ = (bitField0_ & ~0x00000001);
        containerId_ = null;
        if (containerIdBuilder_ != null) {
          containerIdBuilder_.dispose();
          containerIdBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder getContainerIdBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getContainerIdFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getContainerIdOrBuilder() {
        if (containerIdBuilder_ != null) {
          return containerIdBuilder_.getMessageOrBuilder();
        } else {
          return containerId_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : containerId_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> 
          getContainerIdFieldBuilder() {
        if (containerIdBuilder_ == null) {
          containerIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder>(
                  getContainerId(),
                  getParentForChildren(),
                  isClean());
          containerId_ = null;
        }
        return containerIdBuilder_;
      }
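      // Editorial note on the lazy pattern above: the SingleFieldBuilderV3 is
      // created on first use, seeded with the current containerId_, after
      // which containerId_ is nulled and the builder becomes the single
      // source of truth for the field.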
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.GetContainerReportRequestProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.GetContainerReportRequestProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportRequestProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportRequestProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportRequestProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<GetContainerReportRequestProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<GetContainerReportRequestProto>() {
      @java.lang.Override
      public GetContainerReportRequestProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<GetContainerReportRequestProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<GetContainerReportRequestProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportRequestProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }

  public interface GetContainerReportResponseProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.GetContainerReportResponseProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>optional .hadoop.yarn.ContainerReportProto container_report = 1;</code>
     * @return Whether the containerReport field is set.
     */
    boolean hasContainerReport();
    /**
     * <code>optional .hadoop.yarn.ContainerReportProto container_report = 1;</code>
     * @return The containerReport.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto getContainerReport();
    /**
     * <code>optional .hadoop.yarn.ContainerReportProto container_report = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProtoOrBuilder getContainerReportOrBuilder();
  }
  /**
   * Protobuf type {@code hadoop.yarn.GetContainerReportResponseProto}
   */
  public static final class GetContainerReportResponseProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.GetContainerReportResponseProto)
      GetContainerReportResponseProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use GetContainerReportResponseProto.newBuilder() to construct.
    private GetContainerReportResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private GetContainerReportResponseProto() {
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new GetContainerReportResponseProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetContainerReportResponseProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetContainerReportResponseProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportResponseProto.Builder.class);
    }

    private int bitField0_;
    public static final int CONTAINER_REPORT_FIELD_NUMBER = 1;
    private org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto containerReport_;
    /**
     * <code>optional .hadoop.yarn.ContainerReportProto container_report = 1;</code>
     * @return Whether the containerReport field is set.
     */
    @java.lang.Override
    public boolean hasContainerReport() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.ContainerReportProto container_report = 1;</code>
     * @return The containerReport.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto getContainerReport() {
      return containerReport_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto.getDefaultInstance() : containerReport_;
    }
    /**
     * <code>optional .hadoop.yarn.ContainerReportProto container_report = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProtoOrBuilder getContainerReportOrBuilder() {
      return containerReport_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto.getDefaultInstance() : containerReport_;
    }

    private byte memoizedIsInitialized = -1;
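    // Tri-state cache: -1 = not yet computed, 0 = known uninitialized, 1 = known initialized.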
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      if (hasContainerReport()) {
        if (!getContainerReport().isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeMessage(1, getContainerReport());
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, getContainerReport());
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportResponseProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportResponseProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportResponseProto) obj;

      if (hasContainerReport() != other.hasContainerReport()) return false;
      if (hasContainerReport()) {
        if (!getContainerReport()
            .equals(other.getContainerReport())) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
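      // 0 doubles as the "not yet computed" sentinel, so a hash that genuinely
      // equals 0 is simply recomputed on every call.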
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasContainerReport()) {
        hash = (37 * hash) + CONTAINER_REPORT_FIELD_NUMBER;
        hash = (53 * hash) + getContainerReport().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportResponseProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportResponseProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportResponseProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportResponseProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportResponseProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportResponseProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportResponseProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportResponseProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportResponseProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.GetContainerReportResponseProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.GetContainerReportResponseProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportResponseProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetContainerReportResponseProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetContainerReportResponseProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportResponseProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportResponseProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
          getContainerReportFieldBuilder();
        }
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        containerReport_ = null;
        if (containerReportBuilder_ != null) {
          containerReportBuilder_.dispose();
          containerReportBuilder_ = null;
        }
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetContainerReportResponseProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportResponseProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportResponseProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportResponseProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportResponseProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportResponseProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportResponseProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportResponseProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportResponseProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.containerReport_ = containerReportBuilder_ == null
              ? containerReport_
              : containerReportBuilder_.build();
          to_bitField0_ |= 0x00000001;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportResponseProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportResponseProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportResponseProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportResponseProto.getDefaultInstance()) return this;
        if (other.hasContainerReport()) {
          mergeContainerReport(other.getContainerReport());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        if (hasContainerReport()) {
          if (!getContainerReport().isInitialized()) {
            return false;
          }
        }
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
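              // Tag 10 == (field number 1 << 3) | wire type 2 (length-delimited message).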
              case 10: {
                input.readMessage(
                    getContainerReportFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

      private org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto containerReport_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProtoOrBuilder> containerReportBuilder_;
      /**
       * <code>optional .hadoop.yarn.ContainerReportProto container_report = 1;</code>
       * @return Whether the containerReport field is set.
       */
      public boolean hasContainerReport() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.ContainerReportProto container_report = 1;</code>
       * @return The containerReport.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto getContainerReport() {
        if (containerReportBuilder_ == null) {
          return containerReport_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto.getDefaultInstance() : containerReport_;
        } else {
          return containerReportBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.ContainerReportProto container_report = 1;</code>
       */
      public Builder setContainerReport(org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto value) {
        if (containerReportBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          containerReport_ = value;
        } else {
          containerReportBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ContainerReportProto container_report = 1;</code>
       */
      public Builder setContainerReport(
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto.Builder builderForValue) {
        if (containerReportBuilder_ == null) {
          containerReport_ = builderForValue.build();
        } else {
          containerReportBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ContainerReportProto container_report = 1;</code>
       */
      public Builder mergeContainerReport(org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto value) {
        if (containerReportBuilder_ == null) {
          if (((bitField0_ & 0x00000001) != 0) &&
            containerReport_ != null &&
            containerReport_ != org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto.getDefaultInstance()) {
            getContainerReportBuilder().mergeFrom(value);
          } else {
            containerReport_ = value;
          }
        } else {
          containerReportBuilder_.mergeFrom(value);
        }
        if (containerReport_ != null) {
          bitField0_ |= 0x00000001;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ContainerReportProto container_report = 1;</code>
       */
      public Builder clearContainerReport() {
        bitField0_ = (bitField0_ & ~0x00000001);
        containerReport_ = null;
        if (containerReportBuilder_ != null) {
          containerReportBuilder_.dispose();
          containerReportBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ContainerReportProto container_report = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto.Builder getContainerReportBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getContainerReportFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.ContainerReportProto container_report = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProtoOrBuilder getContainerReportOrBuilder() {
        if (containerReportBuilder_ != null) {
          return containerReportBuilder_.getMessageOrBuilder();
        } else {
          return containerReport_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto.getDefaultInstance() : containerReport_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.ContainerReportProto container_report = 1;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProtoOrBuilder> 
          getContainerReportFieldBuilder() {
        if (containerReportBuilder_ == null) {
          containerReportBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProtoOrBuilder>(
                  getContainerReport(),
                  getParentForChildren(),
                  isClean());
          containerReport_ = null;
        }
        return containerReportBuilder_;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.GetContainerReportResponseProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.GetContainerReportResponseProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportResponseProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportResponseProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportResponseProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<GetContainerReportResponseProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<GetContainerReportResponseProto>() {
      @java.lang.Override
      public GetContainerReportResponseProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<GetContainerReportResponseProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<GetContainerReportResponseProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportResponseProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }

  public interface GetContainersRequestProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.GetContainersRequestProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;</code>
     * @return Whether the applicationAttemptId field is set.
     */
    boolean hasApplicationAttemptId();
    /**
     * <code>optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;</code>
     * @return The applicationAttemptId.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto getApplicationAttemptId();
    /**
     * <code>optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder getApplicationAttemptIdOrBuilder();
  }
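
  /*
   * Builder sketch (illustrative only): requests are assembled through the
   * generated builder; setApplicationAttemptId(...) rejects null and marks
   * the field as present. "attemptId" is assumed to be built elsewhere.
   *
   *   org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto attemptId = ...;
   *   GetContainersRequestProto request =
   *       GetContainersRequestProto.newBuilder()
   *           .setApplicationAttemptId(attemptId)
   *           .build();
   */
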
  /**
   * Protobuf type {@code hadoop.yarn.GetContainersRequestProto}
   */
  public static final class GetContainersRequestProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.GetContainersRequestProto)
      GetContainersRequestProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use GetContainersRequestProto.newBuilder() to construct.
    private GetContainersRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private GetContainersRequestProto() {
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new GetContainersRequestProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetContainersRequestProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetContainersRequestProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersRequestProto.Builder.class);
    }

    private int bitField0_;
    public static final int APPLICATION_ATTEMPT_ID_FIELD_NUMBER = 1;
    private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto applicationAttemptId_;
    /**
     * <code>optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;</code>
     * @return Whether the applicationAttemptId field is set.
     */
    @java.lang.Override
    public boolean hasApplicationAttemptId() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;</code>
     * @return The applicationAttemptId.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto getApplicationAttemptId() {
      return applicationAttemptId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.getDefaultInstance() : applicationAttemptId_;
    }
    /**
     * <code>optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder getApplicationAttemptIdOrBuilder() {
      return applicationAttemptId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.getDefaultInstance() : applicationAttemptId_;
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeMessage(1, getApplicationAttemptId());
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, getApplicationAttemptId());
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersRequestProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersRequestProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersRequestProto) obj;

      if (hasApplicationAttemptId() != other.hasApplicationAttemptId()) return false;
      if (hasApplicationAttemptId()) {
        if (!getApplicationAttemptId()
            .equals(other.getApplicationAttemptId())) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasApplicationAttemptId()) {
        hash = (37 * hash) + APPLICATION_ATTEMPT_ID_FIELD_NUMBER;
        hash = (53 * hash) + getApplicationAttemptId().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersRequestProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersRequestProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersRequestProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersRequestProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersRequestProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersRequestProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersRequestProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersRequestProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersRequestProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.GetContainersRequestProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.GetContainersRequestProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersRequestProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetContainersRequestProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetContainersRequestProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersRequestProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersRequestProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
          getApplicationAttemptIdFieldBuilder();
        }
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        applicationAttemptId_ = null;
        if (applicationAttemptIdBuilder_ != null) {
          applicationAttemptIdBuilder_.dispose();
          applicationAttemptIdBuilder_ = null;
        }
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetContainersRequestProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersRequestProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersRequestProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersRequestProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersRequestProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersRequestProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersRequestProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersRequestProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersRequestProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.applicationAttemptId_ = applicationAttemptIdBuilder_ == null
              ? applicationAttemptId_
              : applicationAttemptIdBuilder_.build();
          to_bitField0_ |= 0x00000001;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersRequestProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersRequestProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersRequestProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersRequestProto.getDefaultInstance()) return this;
        if (other.hasApplicationAttemptId()) {
          mergeApplicationAttemptId(other.getApplicationAttemptId());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                input.readMessage(
                    getApplicationAttemptIdFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

      private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto applicationAttemptId_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder> applicationAttemptIdBuilder_;
      /**
       * <code>optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;</code>
       * @return Whether the applicationAttemptId field is set.
       */
      public boolean hasApplicationAttemptId() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;</code>
       * @return The applicationAttemptId.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto getApplicationAttemptId() {
        if (applicationAttemptIdBuilder_ == null) {
          return applicationAttemptId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.getDefaultInstance() : applicationAttemptId_;
        } else {
          return applicationAttemptIdBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;</code>
       */
      public Builder setApplicationAttemptId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto value) {
        if (applicationAttemptIdBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          applicationAttemptId_ = value;
        } else {
          applicationAttemptIdBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;</code>
       */
      public Builder setApplicationAttemptId(
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder builderForValue) {
        if (applicationAttemptIdBuilder_ == null) {
          applicationAttemptId_ = builderForValue.build();
        } else {
          applicationAttemptIdBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;</code>
       */
      public Builder mergeApplicationAttemptId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto value) {
        if (applicationAttemptIdBuilder_ == null) {
          if (((bitField0_ & 0x00000001) != 0) &&
            applicationAttemptId_ != null &&
            applicationAttemptId_ != org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.getDefaultInstance()) {
            getApplicationAttemptIdBuilder().mergeFrom(value);
          } else {
            applicationAttemptId_ = value;
          }
        } else {
          applicationAttemptIdBuilder_.mergeFrom(value);
        }
        if (applicationAttemptId_ != null) {
          bitField0_ |= 0x00000001;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;</code>
       */
      public Builder clearApplicationAttemptId() {
        bitField0_ = (bitField0_ & ~0x00000001);
        applicationAttemptId_ = null;
        if (applicationAttemptIdBuilder_ != null) {
          applicationAttemptIdBuilder_.dispose();
          applicationAttemptIdBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder getApplicationAttemptIdBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getApplicationAttemptIdFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder getApplicationAttemptIdOrBuilder() {
        if (applicationAttemptIdBuilder_ != null) {
          return applicationAttemptIdBuilder_.getMessageOrBuilder();
        } else {
          return applicationAttemptId_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.getDefaultInstance() : applicationAttemptId_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder> 
          getApplicationAttemptIdFieldBuilder() {
        if (applicationAttemptIdBuilder_ == null) {
          applicationAttemptIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder>(
                  getApplicationAttemptId(),
                  getParentForChildren(),
                  isClean());
          applicationAttemptId_ = null;
        }
        return applicationAttemptIdBuilder_;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.GetContainersRequestProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.GetContainersRequestProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersRequestProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersRequestProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersRequestProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<GetContainersRequestProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<GetContainersRequestProto>() {
      @java.lang.Override
      public GetContainersRequestProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<GetContainersRequestProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<GetContainersRequestProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersRequestProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }

  public interface GetContainersResponseProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.GetContainersResponseProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>repeated .hadoop.yarn.ContainerReportProto containers = 1;</code>
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto> 
        getContainersList();
    /**
     * <code>repeated .hadoop.yarn.ContainerReportProto containers = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto getContainers(int index);
    /**
     * <code>repeated .hadoop.yarn.ContainerReportProto containers = 1;</code>
     */
    int getContainersCount();
    /**
     * <code>repeated .hadoop.yarn.ContainerReportProto containers = 1;</code>
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProtoOrBuilder> 
        getContainersOrBuilderList();
    /**
     * <code>repeated .hadoop.yarn.ContainerReportProto containers = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProtoOrBuilder getContainersOrBuilder(
        int index);
  }
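
  /*
   * Read-side sketch (illustrative only): the repeated containers field is
   * exposed as a java.util.List view that callers should treat as read-only;
   * getContainersCount()/getContainers(i) give indexed access. "response" is
   * assumed to come from an RPC elsewhere.
   *
   *   GetContainersResponseProto response = ...;
   *   for (org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto report
   *       : response.getContainersList()) {
   *     // process each container report here
   *   }
   */
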
  /**
   * Protobuf type {@code hadoop.yarn.GetContainersResponseProto}
   */
  public static final class GetContainersResponseProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.GetContainersResponseProto)
      GetContainersResponseProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use GetContainersResponseProto.newBuilder() to construct.
    private GetContainersResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private GetContainersResponseProto() {
      containers_ = java.util.Collections.emptyList();
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new GetContainersResponseProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetContainersResponseProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetContainersResponseProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersResponseProto.Builder.class);
    }

    public static final int CONTAINERS_FIELD_NUMBER = 1;
    @SuppressWarnings("serial")
    private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto> containers_;
    /**
     * <code>repeated .hadoop.yarn.ContainerReportProto containers = 1;</code>
     */
    @java.lang.Override
    public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto> getContainersList() {
      return containers_;
    }
    /**
     * <code>repeated .hadoop.yarn.ContainerReportProto containers = 1;</code>
     */
    @java.lang.Override
    public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProtoOrBuilder> 
        getContainersOrBuilderList() {
      return containers_;
    }
    /**
     * <code>repeated .hadoop.yarn.ContainerReportProto containers = 1;</code>
     */
    @java.lang.Override
    public int getContainersCount() {
      return containers_.size();
    }
    /**
     * <code>repeated .hadoop.yarn.ContainerReportProto containers = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto getContainers(int index) {
      return containers_.get(index);
    }
    /**
     * <code>repeated .hadoop.yarn.ContainerReportProto containers = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProtoOrBuilder getContainersOrBuilder(
        int index) {
      return containers_.get(index);
    }

    // Memoized result of isInitialized(): -1 = not yet computed, 0 = false, 1 = true.
    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      for (int i = 0; i < getContainersCount(); i++) {
        if (!getContainers(i).isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      for (int i = 0; i < containers_.size(); i++) {
        output.writeMessage(1, containers_.get(i));
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      for (int i = 0; i < containers_.size(); i++) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, containers_.get(i));
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersResponseProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersResponseProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersResponseProto) obj;

      if (!getContainersList()
          .equals(other.getContainersList())) return false;
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (getContainersCount() > 0) {
        hash = (37 * hash) + CONTAINERS_FIELD_NUMBER;
        hash = (53 * hash) + getContainersList().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersResponseProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersResponseProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersResponseProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersResponseProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersResponseProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersResponseProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersResponseProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersResponseProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersResponseProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.GetContainersResponseProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.GetContainersResponseProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersResponseProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetContainersResponseProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetContainersResponseProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersResponseProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersResponseProto.newBuilder()
      private Builder() {
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        if (containersBuilder_ == null) {
          containers_ = java.util.Collections.emptyList();
        } else {
          containers_ = null;
          containersBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetContainersResponseProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersResponseProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersResponseProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersResponseProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersResponseProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersResponseProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersResponseProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersResponseProto(this);
        buildPartialRepeatedFields(result);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartialRepeatedFields(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersResponseProto result) {
        if (containersBuilder_ == null) {
          if (((bitField0_ & 0x00000001) != 0)) {
            containers_ = java.util.Collections.unmodifiableList(containers_);
            bitField0_ = (bitField0_ & ~0x00000001);
          }
          result.containers_ = containers_;
        } else {
          result.containers_ = containersBuilder_.build();
        }
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersResponseProto result) {
        // No singular fields to copy; the repeated containers field is
        // handled by buildPartialRepeatedFields above.
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersResponseProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersResponseProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersResponseProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersResponseProto.getDefaultInstance()) return this;
        if (containersBuilder_ == null) {
          if (!other.containers_.isEmpty()) {
            if (containers_.isEmpty()) {
              containers_ = other.containers_;
              bitField0_ = (bitField0_ & ~0x00000001);
            } else {
              ensureContainersIsMutable();
              containers_.addAll(other.containers_);
            }
            onChanged();
          }
        } else {
          if (!other.containers_.isEmpty()) {
            if (containersBuilder_.isEmpty()) {
              containersBuilder_.dispose();
              containersBuilder_ = null;
              containers_ = other.containers_;
              bitField0_ = (bitField0_ & ~0x00000001);
              containersBuilder_ = 
                org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                   getContainersFieldBuilder() : null;
            } else {
              containersBuilder_.addAllMessages(other.containers_);
            }
          }
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        for (int i = 0; i < getContainersCount(); i++) {
          if (!getContainers(i).isInitialized()) {
            return false;
          }
        }
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
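            // Wire tag = (field_number << 3) | wire_type: tag 10 below is
            // field 1 (containers) with wire type 2 (length-delimited).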
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto m =
                    input.readMessage(
                        org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto.PARSER,
                        extensionRegistry);
                if (containersBuilder_ == null) {
                  ensureContainersIsMutable();
                  containers_.add(m);
                } else {
                  containersBuilder_.addMessage(m);
                }
                break;
              } // case 10
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

      private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto> containers_ =
        java.util.Collections.emptyList();
      private void ensureContainersIsMutable() {
        if (!((bitField0_ & 0x00000001) != 0)) {
          containers_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto>(containers_);
          bitField0_ |= 0x00000001;
        }
      }

      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProtoOrBuilder> containersBuilder_;

      /**
       * <code>repeated .hadoop.yarn.ContainerReportProto containers = 1;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto> getContainersList() {
        if (containersBuilder_ == null) {
          return java.util.Collections.unmodifiableList(containers_);
        } else {
          return containersBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerReportProto containers = 1;</code>
       */
      public int getContainersCount() {
        if (containersBuilder_ == null) {
          return containers_.size();
        } else {
          return containersBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerReportProto containers = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto getContainers(int index) {
        if (containersBuilder_ == null) {
          return containers_.get(index);
        } else {
          return containersBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerReportProto containers = 1;</code>
       */
      public Builder setContainers(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto value) {
        if (containersBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureContainersIsMutable();
          containers_.set(index, value);
          onChanged();
        } else {
          containersBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerReportProto containers = 1;</code>
       */
      public Builder setContainers(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto.Builder builderForValue) {
        if (containersBuilder_ == null) {
          ensureContainersIsMutable();
          containers_.set(index, builderForValue.build());
          onChanged();
        } else {
          containersBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerReportProto containers = 1;</code>
       */
      public Builder addContainers(org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto value) {
        if (containersBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureContainersIsMutable();
          containers_.add(value);
          onChanged();
        } else {
          containersBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerReportProto containers = 1;</code>
       */
      public Builder addContainers(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto value) {
        if (containersBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureContainersIsMutable();
          containers_.add(index, value);
          onChanged();
        } else {
          containersBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerReportProto containers = 1;</code>
       */
      public Builder addContainers(
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto.Builder builderForValue) {
        if (containersBuilder_ == null) {
          ensureContainersIsMutable();
          containers_.add(builderForValue.build());
          onChanged();
        } else {
          containersBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerReportProto containers = 1;</code>
       */
      public Builder addContainers(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto.Builder builderForValue) {
        if (containersBuilder_ == null) {
          ensureContainersIsMutable();
          containers_.add(index, builderForValue.build());
          onChanged();
        } else {
          containersBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerReportProto containers = 1;</code>
       */
      public Builder addAllContainers(
          java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto> values) {
        if (containersBuilder_ == null) {
          ensureContainersIsMutable();
          org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
              values, containers_);
          onChanged();
        } else {
          containersBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerReportProto containers = 1;</code>
       */
      public Builder clearContainers() {
        if (containersBuilder_ == null) {
          containers_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000001);
          onChanged();
        } else {
          containersBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerReportProto containers = 1;</code>
       */
      public Builder removeContainers(int index) {
        if (containersBuilder_ == null) {
          ensureContainersIsMutable();
          containers_.remove(index);
          onChanged();
        } else {
          containersBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerReportProto containers = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto.Builder getContainersBuilder(
          int index) {
        return getContainersFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerReportProto containers = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProtoOrBuilder getContainersOrBuilder(
          int index) {
        if (containersBuilder_ == null) {
          return containers_.get(index);
        } else {
          return containersBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerReportProto containers = 1;</code>
       */
      public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProtoOrBuilder> 
           getContainersOrBuilderList() {
        if (containersBuilder_ != null) {
          return containersBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(containers_);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerReportProto containers = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto.Builder addContainersBuilder() {
        return getContainersFieldBuilder().addBuilder(
            org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerReportProto containers = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto.Builder addContainersBuilder(
          int index) {
        return getContainersFieldBuilder().addBuilder(
            index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerReportProto containers = 1;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto.Builder> 
           getContainersBuilderList() {
        return getContainersFieldBuilder().getBuilderList();
      }
      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProtoOrBuilder> 
          getContainersFieldBuilder() {
        if (containersBuilder_ == null) {
          containersBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProtoOrBuilder>(
                  containers_,
                  ((bitField0_ & 0x00000001) != 0),
                  getParentForChildren(),
                  isClean());
          containers_ = null;
        }
        return containersBuilder_;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.GetContainersResponseProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.GetContainersResponseProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersResponseProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersResponseProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersResponseProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<GetContainersResponseProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<GetContainersResponseProto>() {
      @java.lang.Override
      public GetContainersResponseProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<GetContainersResponseProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<GetContainersResponseProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersResponseProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
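  /*
   * Usage sketch (illustrative, not generated code): building a
   * GetContainersResponseProto and round-tripping it through its serialized
   * form. This assumes ContainerReportProto declares no required fields, so
   * its default instance passes the isInitialized() check enforced by build().
   *
   *   YarnServiceProtos.GetContainersResponseProto response =
   *       YarnServiceProtos.GetContainersResponseProto.newBuilder()
   *           .addContainers(org.apache.hadoop.yarn.proto.YarnProtos
   *               .ContainerReportProto.getDefaultInstance())
   *           .build();
   *   byte[] wire = response.toByteArray();
   *   YarnServiceProtos.GetContainersResponseProto parsed =
   *       YarnServiceProtos.GetContainersResponseProto.parseFrom(wire);
   *   // parsed.getContainersCount() == 1
   */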

  public interface UseSharedCacheResourceRequestProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.UseSharedCacheResourceRequestProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>optional .hadoop.yarn.ApplicationIdProto applicationId = 1;</code>
     * @return Whether the applicationId field is set.
     */
    boolean hasApplicationId();
    /**
     * <code>optional .hadoop.yarn.ApplicationIdProto applicationId = 1;</code>
     * @return The applicationId.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getApplicationId();
    /**
     * <code>optional .hadoop.yarn.ApplicationIdProto applicationId = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getApplicationIdOrBuilder();

    /**
     * <code>optional string resourceKey = 2;</code>
     * @return Whether the resourceKey field is set.
     */
    boolean hasResourceKey();
    /**
     * <code>optional string resourceKey = 2;</code>
     * @return The resourceKey.
     */
    java.lang.String getResourceKey();
    /**
     * <code>optional string resourceKey = 2;</code>
     * @return The bytes for resourceKey.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getResourceKeyBytes();
  }
  /**
   * Protobuf type {@code hadoop.yarn.UseSharedCacheResourceRequestProto}
   */
  public static final class UseSharedCacheResourceRequestProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.UseSharedCacheResourceRequestProto)
      UseSharedCacheResourceRequestProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use UseSharedCacheResourceRequestProto.newBuilder() to construct.
    private UseSharedCacheResourceRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private UseSharedCacheResourceRequestProto() {
      resourceKey_ = "";
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new UseSharedCacheResourceRequestProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_UseSharedCacheResourceRequestProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_UseSharedCacheResourceRequestProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceRequestProto.Builder.class);
    }

    private int bitField0_;
    public static final int APPLICATIONID_FIELD_NUMBER = 1;
    private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto applicationId_;
    /**
     * <code>optional .hadoop.yarn.ApplicationIdProto applicationId = 1;</code>
     * @return Whether the applicationId field is set.
     */
    @java.lang.Override
    public boolean hasApplicationId() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.ApplicationIdProto applicationId = 1;</code>
     * @return The applicationId.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getApplicationId() {
      return applicationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_;
    }
    /**
     * <code>optional .hadoop.yarn.ApplicationIdProto applicationId = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getApplicationIdOrBuilder() {
      return applicationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_;
    }

    public static final int RESOURCEKEY_FIELD_NUMBER = 2;
    @SuppressWarnings("serial")
    private volatile java.lang.Object resourceKey_ = "";
    /**
     * <code>optional string resourceKey = 2;</code>
     * @return Whether the resourceKey field is set.
     */
    @java.lang.Override
    public boolean hasResourceKey() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <code>optional string resourceKey = 2;</code>
     * @return The resourceKey.
     */
    @java.lang.Override
    public java.lang.String getResourceKey() {
      java.lang.Object ref = resourceKey_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          // Cache the decoded String so subsequent calls skip the UTF-8 decode.
          resourceKey_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string resourceKey = 2;</code>
     * @return The bytes for resourceKey.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getResourceKeyBytes() {
      java.lang.Object ref = resourceKey_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        resourceKey_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeMessage(1, getApplicationId());
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 2, resourceKey_);
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, getApplicationId());
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(2, resourceKey_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceRequestProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceRequestProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceRequestProto) obj;

      if (hasApplicationId() != other.hasApplicationId()) return false;
      if (hasApplicationId()) {
        if (!getApplicationId()
            .equals(other.getApplicationId())) return false;
      }
      if (hasResourceKey() != other.hasResourceKey()) return false;
      if (hasResourceKey()) {
        if (!getResourceKey()
            .equals(other.getResourceKey())) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasApplicationId()) {
        hash = (37 * hash) + APPLICATIONID_FIELD_NUMBER;
        hash = (53 * hash) + getApplicationId().hashCode();
      }
      if (hasResourceKey()) {
        hash = (37 * hash) + RESOURCEKEY_FIELD_NUMBER;
        hash = (53 * hash) + getResourceKey().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceRequestProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceRequestProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceRequestProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceRequestProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceRequestProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceRequestProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceRequestProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceRequestProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceRequestProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.UseSharedCacheResourceRequestProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.UseSharedCacheResourceRequestProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceRequestProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_UseSharedCacheResourceRequestProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_UseSharedCacheResourceRequestProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceRequestProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceRequestProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
          getApplicationIdFieldBuilder();
        }
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        applicationId_ = null;
        if (applicationIdBuilder_ != null) {
          applicationIdBuilder_.dispose();
          applicationIdBuilder_ = null;
        }
        resourceKey_ = "";
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_UseSharedCacheResourceRequestProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceRequestProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceRequestProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceRequestProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceRequestProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceRequestProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceRequestProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceRequestProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceRequestProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.applicationId_ = applicationIdBuilder_ == null
              ? applicationId_
              : applicationIdBuilder_.build();
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.resourceKey_ = resourceKey_;
          to_bitField0_ |= 0x00000002;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceRequestProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceRequestProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceRequestProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceRequestProto.getDefaultInstance()) return this;
        if (other.hasApplicationId()) {
          mergeApplicationId(other.getApplicationId());
        }
        if (other.hasResourceKey()) {
          resourceKey_ = other.resourceKey_;
          bitField0_ |= 0x00000002;
          onChanged();
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
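            // Wire tag = (field_number << 3) | wire_type: 10 = field 1
            // (applicationId), 18 = field 2 (resourceKey), both
            // length-delimited (wire type 2).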
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                input.readMessage(
                    getApplicationIdFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
              case 18: {
                resourceKey_ = input.readBytes();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

      private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto applicationId_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder> applicationIdBuilder_;
      /**
       * <code>optional .hadoop.yarn.ApplicationIdProto applicationId = 1;</code>
       * @return Whether the applicationId field is set.
       */
      public boolean hasApplicationId() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationIdProto applicationId = 1;</code>
       * @return The applicationId.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getApplicationId() {
        if (applicationIdBuilder_ == null) {
          return applicationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_;
        } else {
          return applicationIdBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationIdProto applicationId = 1;</code>
       */
      public Builder setApplicationId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto value) {
        if (applicationIdBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          applicationId_ = value;
        } else {
          applicationIdBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationIdProto applicationId = 1;</code>
       */
      public Builder setApplicationId(
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder builderForValue) {
        if (applicationIdBuilder_ == null) {
          applicationId_ = builderForValue.build();
        } else {
          applicationIdBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationIdProto applicationId = 1;</code>
       */
      public Builder mergeApplicationId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto value) {
        if (applicationIdBuilder_ == null) {
          if (((bitField0_ & 0x00000001) != 0) &&
            applicationId_ != null &&
            applicationId_ != org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance()) {
            getApplicationIdBuilder().mergeFrom(value);
          } else {
            applicationId_ = value;
          }
        } else {
          applicationIdBuilder_.mergeFrom(value);
        }
        if (applicationId_ != null) {
          bitField0_ |= 0x00000001;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationIdProto applicationId = 1;</code>
       */
      public Builder clearApplicationId() {
        bitField0_ = (bitField0_ & ~0x00000001);
        applicationId_ = null;
        if (applicationIdBuilder_ != null) {
          applicationIdBuilder_.dispose();
          applicationIdBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationIdProto applicationId = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder getApplicationIdBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getApplicationIdFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationIdProto applicationId = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getApplicationIdOrBuilder() {
        if (applicationIdBuilder_ != null) {
          return applicationIdBuilder_.getMessageOrBuilder();
        } else {
          return applicationId_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationIdProto applicationId = 1;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder> 
          getApplicationIdFieldBuilder() {
        if (applicationIdBuilder_ == null) {
          applicationIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder>(
                  getApplicationId(),
                  getParentForChildren(),
                  isClean());
          applicationId_ = null;
        }
        return applicationIdBuilder_;
      }

      private java.lang.Object resourceKey_ = "";
      /**
       * <code>optional string resourceKey = 2;</code>
       * @return Whether the resourceKey field is set.
       */
      public boolean hasResourceKey() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * <code>optional string resourceKey = 2;</code>
       * @return The resourceKey.
       */
      public java.lang.String getResourceKey() {
        java.lang.Object ref = resourceKey_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            resourceKey_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string resourceKey = 2;</code>
       * @return The bytes for resourceKey.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getResourceKeyBytes() {
        java.lang.Object ref = resourceKey_;
        if (ref instanceof java.lang.String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          resourceKey_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string resourceKey = 2;</code>
       * @param value The resourceKey to set.
       * @return This builder for chaining.
       */
      public Builder setResourceKey(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        resourceKey_ = value;
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>optional string resourceKey = 2;</code>
       * @return This builder for chaining.
       */
      public Builder clearResourceKey() {
        resourceKey_ = getDefaultInstance().getResourceKey();
        bitField0_ = (bitField0_ & ~0x00000002);
        onChanged();
        return this;
      }
      /**
       * <code>optional string resourceKey = 2;</code>
       * @param value The bytes for resourceKey to set.
       * @return This builder for chaining.
       */
      public Builder setResourceKeyBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        resourceKey_ = value;
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.UseSharedCacheResourceRequestProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.UseSharedCacheResourceRequestProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceRequestProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceRequestProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceRequestProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated
    public static final org.apache.hadoop.thirdparty.protobuf.Parser<UseSharedCacheResourceRequestProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<UseSharedCacheResourceRequestProto>() {
      @java.lang.Override
      public UseSharedCacheResourceRequestProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<UseSharedCacheResourceRequestProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<UseSharedCacheResourceRequestProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceRequestProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
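
  // Editor-added illustrative sketch, not generated code: builds a shared-cache
  // request and round-trips it through the wire format. The resource key below
  // is a placeholder value, not a real checksum.
  private static UseSharedCacheResourceRequestProto exampleRequestRoundTrip()
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    UseSharedCacheResourceRequestProto request =
        UseSharedCacheResourceRequestProto.newBuilder()
            .setResourceKey("0a1b2c3d4e5f")
            .build();
    byte[] wire = request.toByteArray();
    return UseSharedCacheResourceRequestProto.parseFrom(wire);
  }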

  public interface UseSharedCacheResourceResponseProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.UseSharedCacheResourceResponseProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>optional string path = 1;</code>
     * @return Whether the path field is set.
     */
    boolean hasPath();
    /**
     * <code>optional string path = 1;</code>
     * @return The path.
     */
    java.lang.String getPath();
    /**
     * <code>optional string path = 1;</code>
     * @return The bytes for path.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getPathBytes();
  }
  /**
   * Protobuf type {@code hadoop.yarn.UseSharedCacheResourceResponseProto}
   */
  public static final class UseSharedCacheResourceResponseProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.UseSharedCacheResourceResponseProto)
      UseSharedCacheResourceResponseProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use UseSharedCacheResourceResponseProto.newBuilder() to construct.
    private UseSharedCacheResourceResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private UseSharedCacheResourceResponseProto() {
      path_ = "";
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new UseSharedCacheResourceResponseProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_UseSharedCacheResourceResponseProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_UseSharedCacheResourceResponseProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceResponseProto.Builder.class);
    }

    private int bitField0_;
    public static final int PATH_FIELD_NUMBER = 1;
    @SuppressWarnings("serial")
    private volatile java.lang.Object path_ = "";
    /**
     * <code>optional string path = 1;</code>
     * @return Whether the path field is set.
     */
    @java.lang.Override
    public boolean hasPath() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional string path = 1;</code>
     * @return The path.
     */
    @java.lang.Override
    public java.lang.String getPath() {
      java.lang.Object ref = path_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          path_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string path = 1;</code>
     * @return The bytes for path.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getPathBytes() {
      java.lang.Object ref = path_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        path_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }
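
    // Editor-added note: path_ holds either the ByteString read off the wire or
    // a java.lang.String once decoded; getPath() and getPathBytes() cache the
    // converted form in place (when the bytes are valid UTF-8) so repeated calls
    // skip the conversion.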

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, path_);
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, path_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceResponseProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceResponseProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceResponseProto) obj;

      if (hasPath() != other.hasPath()) return false;
      if (hasPath()) {
        if (!getPath()
            .equals(other.getPath())) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasPath()) {
        hash = (37 * hash) + PATH_FIELD_NUMBER;
        hash = (53 * hash) + getPath().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceResponseProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceResponseProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceResponseProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceResponseProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceResponseProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceResponseProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceResponseProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceResponseProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }
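
    // Editor-added sketch: the delimited variants pair with writeDelimitedTo and
    // carry a length prefix, so several messages can share one stream; per the
    // protobuf contract this returns null at a clean end-of-stream.
    private static UseSharedCacheResourceResponseProto exampleReadDelimited(
        java.io.InputStream in) throws java.io.IOException {
      return parseDelimitedFrom(in);
    }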

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceResponseProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.UseSharedCacheResourceResponseProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.UseSharedCacheResourceResponseProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceResponseProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_UseSharedCacheResourceResponseProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_UseSharedCacheResourceResponseProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceResponseProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceResponseProto.newBuilder()
      private Builder() {
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);

      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        path_ = "";
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_UseSharedCacheResourceResponseProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceResponseProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceResponseProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceResponseProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceResponseProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceResponseProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceResponseProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceResponseProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceResponseProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.path_ = path_;
          to_bitField0_ |= 0x00000001;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceResponseProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceResponseProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceResponseProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceResponseProto.getDefaultInstance()) return this;
        if (other.hasPath()) {
          path_ = other.path_;
          bitField0_ |= 0x00000001;
          onChanged();
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                path_ = input.readBytes();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
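
      // Editor-added note: the case labels above are raw wire tags, computed as
      // (field_number << 3) | wire_type; 10 is field 1 with wire type 2
      // (length-delimited), which covers both strings and nested messages.
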
      private int bitField0_;

      private java.lang.Object path_ = "";
      /**
       * <code>optional string path = 1;</code>
       * @return Whether the path field is set.
       */
      public boolean hasPath() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>optional string path = 1;</code>
       * @return The path.
       */
      public java.lang.String getPath() {
        java.lang.Object ref = path_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            path_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string path = 1;</code>
       * @return The bytes for path.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getPathBytes() {
        java.lang.Object ref = path_;
        if (ref instanceof java.lang.String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          path_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string path = 1;</code>
       * @param value The path to set.
       * @return This builder for chaining.
       */
      public Builder setPath(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        path_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional string path = 1;</code>
       * @return This builder for chaining.
       */
      public Builder clearPath() {
        path_ = getDefaultInstance().getPath();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
        return this;
      }
      /**
       * <code>optional string path = 1;</code>
       * @param value The bytes for path to set.
       * @return This builder for chaining.
       */
      public Builder setPathBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        path_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.UseSharedCacheResourceResponseProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.UseSharedCacheResourceResponseProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceResponseProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceResponseProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceResponseProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated
    public static final org.apache.hadoop.thirdparty.protobuf.Parser<UseSharedCacheResourceResponseProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<UseSharedCacheResourceResponseProto>() {
      @java.lang.Override
      public UseSharedCacheResourceResponseProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<UseSharedCacheResourceResponseProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<UseSharedCacheResourceResponseProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceResponseProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
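
  // Editor-added sketch: path is an optional field whose default is the empty
  // string, so callers should check hasPath() to tell "unset" apart from an
  // explicitly empty value before trusting getPath().
  private static java.lang.String examplePathOrNull(
      UseSharedCacheResourceResponseProto response) {
    return response.hasPath() ? response.getPath() : null;
  }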

  public interface ReleaseSharedCacheResourceRequestProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.ReleaseSharedCacheResourceRequestProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>optional .hadoop.yarn.ApplicationIdProto applicationId = 1;</code>
     * @return Whether the applicationId field is set.
     */
    boolean hasApplicationId();
    /**
     * <code>optional .hadoop.yarn.ApplicationIdProto applicationId = 1;</code>
     * @return The applicationId.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getApplicationId();
    /**
     * <code>optional .hadoop.yarn.ApplicationIdProto applicationId = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getApplicationIdOrBuilder();

    /**
     * <code>optional string resourceKey = 2;</code>
     * @return Whether the resourceKey field is set.
     */
    boolean hasResourceKey();
    /**
     * <code>optional string resourceKey = 2;</code>
     * @return The resourceKey.
     */
    java.lang.String getResourceKey();
    /**
     * <code>optional string resourceKey = 2;</code>
     * @return The bytes for resourceKey.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getResourceKeyBytes();
  }
  /**
   * Protobuf type {@code hadoop.yarn.ReleaseSharedCacheResourceRequestProto}
   */
  public static final class ReleaseSharedCacheResourceRequestProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.ReleaseSharedCacheResourceRequestProto)
      ReleaseSharedCacheResourceRequestProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use ReleaseSharedCacheResourceRequestProto.newBuilder() to construct.
    private ReleaseSharedCacheResourceRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private ReleaseSharedCacheResourceRequestProto() {
      resourceKey_ = "";
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new ReleaseSharedCacheResourceRequestProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReleaseSharedCacheResourceRequestProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReleaseSharedCacheResourceRequestProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceRequestProto.Builder.class);
    }

    private int bitField0_;
    public static final int APPLICATIONID_FIELD_NUMBER = 1;
    private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto applicationId_;
    /**
     * <code>optional .hadoop.yarn.ApplicationIdProto applicationId = 1;</code>
     * @return Whether the applicationId field is set.
     */
    @java.lang.Override
    public boolean hasApplicationId() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.ApplicationIdProto applicationId = 1;</code>
     * @return The applicationId.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getApplicationId() {
      return applicationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_;
    }
    /**
     * <code>optional .hadoop.yarn.ApplicationIdProto applicationId = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getApplicationIdOrBuilder() {
      return applicationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_;
    }

    public static final int RESOURCEKEY_FIELD_NUMBER = 2;
    @SuppressWarnings("serial")
    private volatile java.lang.Object resourceKey_ = "";
    /**
     * <code>optional string resourceKey = 2;</code>
     * @return Whether the resourceKey field is set.
     */
    @java.lang.Override
    public boolean hasResourceKey() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <code>optional string resourceKey = 2;</code>
     * @return The resourceKey.
     */
    @java.lang.Override
    public java.lang.String getResourceKey() {
      java.lang.Object ref = resourceKey_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          resourceKey_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string resourceKey = 2;</code>
     * @return The bytes for resourceKey.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getResourceKeyBytes() {
      java.lang.Object ref = resourceKey_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        resourceKey_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeMessage(1, getApplicationId());
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 2, resourceKey_);
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, getApplicationId());
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(2, resourceKey_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceRequestProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceRequestProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceRequestProto) obj;

      if (hasApplicationId() != other.hasApplicationId()) return false;
      if (hasApplicationId()) {
        if (!getApplicationId()
            .equals(other.getApplicationId())) return false;
      }
      if (hasResourceKey() != other.hasResourceKey()) return false;
      if (hasResourceKey()) {
        if (!getResourceKey()
            .equals(other.getResourceKey())) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasApplicationId()) {
        hash = (37 * hash) + APPLICATIONID_FIELD_NUMBER;
        hash = (53 * hash) + getApplicationId().hashCode();
      }
      if (hasResourceKey()) {
        hash = (37 * hash) + RESOURCEKEY_FIELD_NUMBER;
        hash = (53 * hash) + getResourceKey().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceRequestProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceRequestProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceRequestProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceRequestProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceRequestProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceRequestProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceRequestProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceRequestProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceRequestProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.ReleaseSharedCacheResourceRequestProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.ReleaseSharedCacheResourceRequestProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceRequestProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReleaseSharedCacheResourceRequestProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReleaseSharedCacheResourceRequestProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceRequestProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceRequestProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
          getApplicationIdFieldBuilder();
        }
      }
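
      // Editor-added note: alwaysUseFieldBuilders is a test-only switch in the
      // protobuf runtime; in normal use it is false and the applicationId field
      // builder is instead created lazily by getApplicationIdFieldBuilder().
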
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        applicationId_ = null;
        if (applicationIdBuilder_ != null) {
          applicationIdBuilder_.dispose();
          applicationIdBuilder_ = null;
        }
        resourceKey_ = "";
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReleaseSharedCacheResourceRequestProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceRequestProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceRequestProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceRequestProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceRequestProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceRequestProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceRequestProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceRequestProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceRequestProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.applicationId_ = applicationIdBuilder_ == null
              ? applicationId_
              : applicationIdBuilder_.build();
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.resourceKey_ = resourceKey_;
          to_bitField0_ |= 0x00000002;
        }
        result.bitField0_ |= to_bitField0_;
      }
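
      // Editor-added note: buildPartial0 copies only the fields whose presence
      // bits are set on the builder, then transfers those bits into the message's
      // own bitField0_ so has*() reflects what was actually populated.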

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceRequestProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceRequestProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceRequestProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceRequestProto.getDefaultInstance()) return this;
        if (other.hasApplicationId()) {
          mergeApplicationId(other.getApplicationId());
        }
        if (other.hasResourceKey()) {
          resourceKey_ = other.resourceKey_;
          bitField0_ |= 0x00000002;
          onChanged();
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                input.readMessage(
                    getApplicationIdFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
              case 18: {
                resourceKey_ = input.readBytes();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

      private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto applicationId_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder> applicationIdBuilder_;
      /**
       * <code>optional .hadoop.yarn.ApplicationIdProto applicationId = 1;</code>
       * @return Whether the applicationId field is set.
       */
      public boolean hasApplicationId() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationIdProto applicationId = 1;</code>
       * @return The applicationId.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getApplicationId() {
        if (applicationIdBuilder_ == null) {
          return applicationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_;
        } else {
          return applicationIdBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationIdProto applicationId = 1;</code>
       */
      public Builder setApplicationId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto value) {
        if (applicationIdBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          applicationId_ = value;
        } else {
          applicationIdBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationIdProto applicationId = 1;</code>
       */
      public Builder setApplicationId(
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder builderForValue) {
        if (applicationIdBuilder_ == null) {
          applicationId_ = builderForValue.build();
        } else {
          applicationIdBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationIdProto applicationId = 1;</code>
       */
      public Builder mergeApplicationId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto value) {
        if (applicationIdBuilder_ == null) {
          if (((bitField0_ & 0x00000001) != 0) &&
            applicationId_ != null &&
            applicationId_ != org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance()) {
            getApplicationIdBuilder().mergeFrom(value);
          } else {
            applicationId_ = value;
          }
        } else {
          applicationIdBuilder_.mergeFrom(value);
        }
        if (applicationId_ != null) {
          bitField0_ |= 0x00000001;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationIdProto applicationId = 1;</code>
       */
      public Builder clearApplicationId() {
        bitField0_ = (bitField0_ & ~0x00000001);
        applicationId_ = null;
        if (applicationIdBuilder_ != null) {
          applicationIdBuilder_.dispose();
          applicationIdBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationIdProto applicationId = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder getApplicationIdBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getApplicationIdFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationIdProto applicationId = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getApplicationIdOrBuilder() {
        if (applicationIdBuilder_ != null) {
          return applicationIdBuilder_.getMessageOrBuilder();
        } else {
          return applicationId_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationIdProto applicationId = 1;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder> 
          getApplicationIdFieldBuilder() {
        if (applicationIdBuilder_ == null) {
          applicationIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder>(
                  getApplicationId(),
                  getParentForChildren(),
                  isClean());
          applicationId_ = null;
        }
        return applicationIdBuilder_;
      }

      private java.lang.Object resourceKey_ = "";
      /**
       * <code>optional string resourceKey = 2;</code>
       * @return Whether the resourceKey field is set.
       */
      public boolean hasResourceKey() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * <code>optional string resourceKey = 2;</code>
       * @return The resourceKey.
       */
      public java.lang.String getResourceKey() {
        java.lang.Object ref = resourceKey_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            resourceKey_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
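      // Note: resourceKey_ holds either a java.lang.String or a ByteString;
      // getResourceKey() above caches the decoded String only when the bytes
      // are valid UTF-8, and getResourceKeyBytes() below caches the UTF-8
      // encoding, so repeated calls skip re-conversion.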
      /**
       * <code>optional string resourceKey = 2;</code>
       * @return The bytes for resourceKey.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getResourceKeyBytes() {
        java.lang.Object ref = resourceKey_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          resourceKey_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string resourceKey = 2;</code>
       * @param value The resourceKey to set.
       * @return This builder for chaining.
       */
      public Builder setResourceKey(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        resourceKey_ = value;
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>optional string resourceKey = 2;</code>
       * @return This builder for chaining.
       */
      public Builder clearResourceKey() {
        resourceKey_ = getDefaultInstance().getResourceKey();
        bitField0_ = (bitField0_ & ~0x00000002);
        onChanged();
        return this;
      }
      /**
       * <code>optional string resourceKey = 2;</code>
       * @param value The bytes for resourceKey to set.
       * @return This builder for chaining.
       */
      public Builder setResourceKeyBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        resourceKey_ = value;
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.ReleaseSharedCacheResourceRequestProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.ReleaseSharedCacheResourceRequestProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceRequestProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceRequestProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceRequestProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<ReleaseSharedCacheResourceRequestProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<ReleaseSharedCacheResourceRequestProto>() {
      @java.lang.Override
      public ReleaseSharedCacheResourceRequestProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
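    // The raw PARSER field is generated @Deprecated; callers are expected to
    // reach the same singleton through parser() or getParserForType() below.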

    public static org.apache.hadoop.thirdparty.protobuf.Parser<ReleaseSharedCacheResourceRequestProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<ReleaseSharedCacheResourceRequestProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceRequestProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
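
  // A minimal usage sketch (illustrative, not part of the generated API
  // surface): a build/serialize/parse round-trip for
  // ReleaseSharedCacheResourceRequestProto. The field values are made up, and
  // ApplicationIdProto's setters are assumed from yarn_protos.proto.
  //
  //   org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto appId =
  //       org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.newBuilder()
  //           .setClusterTimestamp(1690000000000L)
  //           .setId(42)
  //           .build();
  //   ReleaseSharedCacheResourceRequestProto request =
  //       ReleaseSharedCacheResourceRequestProto.newBuilder()
  //           .setApplicationId(appId)
  //           .setResourceKey("sha-256-of-cached-resource")
  //           .build();
  //   byte[] wire = request.toByteArray();
  //   ReleaseSharedCacheResourceRequestProto parsed =
  //       ReleaseSharedCacheResourceRequestProto.parseFrom(wire);
  //   assert parsed.getResourceKey().equals(request.getResourceKey());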

  public interface ReleaseSharedCacheResourceResponseProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.ReleaseSharedCacheResourceResponseProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
  }
  /**
   * Protobuf type {@code hadoop.yarn.ReleaseSharedCacheResourceResponseProto}
   */
  public static final class ReleaseSharedCacheResourceResponseProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.ReleaseSharedCacheResourceResponseProto)
      ReleaseSharedCacheResourceResponseProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use ReleaseSharedCacheResourceResponseProto.newBuilder() to construct.
    private ReleaseSharedCacheResourceResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private ReleaseSharedCacheResourceResponseProto() {
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new ReleaseSharedCacheResourceResponseProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReleaseSharedCacheResourceResponseProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReleaseSharedCacheResourceResponseProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceResponseProto.Builder.class);
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceResponseProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceResponseProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceResponseProto) obj;

      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceResponseProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceResponseProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceResponseProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceResponseProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceResponseProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceResponseProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceResponseProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceResponseProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceResponseProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.ReleaseSharedCacheResourceResponseProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.ReleaseSharedCacheResourceResponseProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceResponseProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReleaseSharedCacheResourceResponseProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReleaseSharedCacheResourceResponseProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceResponseProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceResponseProto.newBuilder()
      private Builder() {
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReleaseSharedCacheResourceResponseProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceResponseProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceResponseProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceResponseProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceResponseProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceResponseProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceResponseProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceResponseProto(this);
        onBuilt();
        return result;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceResponseProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceResponseProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceResponseProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceResponseProto.getDefaultInstance()) return this;
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.ReleaseSharedCacheResourceResponseProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.ReleaseSharedCacheResourceResponseProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceResponseProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceResponseProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceResponseProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<ReleaseSharedCacheResourceResponseProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<ReleaseSharedCacheResourceResponseProto>() {
      @java.lang.Override
      public ReleaseSharedCacheResourceResponseProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<ReleaseSharedCacheResourceResponseProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<ReleaseSharedCacheResourceResponseProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceResponseProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
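
  // ReleaseSharedCacheResourceResponseProto declares no fields, so apart from
  // unknown fields every instance is empty and serializes to zero bytes; a
  // quick sketch of what that implies:
  //
  //   ReleaseSharedCacheResourceResponseProto response =
  //       ReleaseSharedCacheResourceResponseProto.getDefaultInstance();
  //   assert response.getSerializedSize() == 0;
  //   assert ReleaseSharedCacheResourceResponseProto
  //       .parseFrom(new byte[0]).equals(response);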

  public interface GetNewReservationRequestProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.GetNewReservationRequestProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
  }
  /**
   * Protobuf type {@code hadoop.yarn.GetNewReservationRequestProto}
   */
  public static final class GetNewReservationRequestProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.GetNewReservationRequestProto)
      GetNewReservationRequestProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use GetNewReservationRequestProto.newBuilder() to construct.
    private GetNewReservationRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private GetNewReservationRequestProto() {
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new GetNewReservationRequestProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetNewReservationRequestProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetNewReservationRequestProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationRequestProto.Builder.class);
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationRequestProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationRequestProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationRequestProto) obj;

      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationRequestProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationRequestProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationRequestProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationRequestProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationRequestProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationRequestProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationRequestProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationRequestProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationRequestProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.GetNewReservationRequestProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.GetNewReservationRequestProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationRequestProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetNewReservationRequestProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetNewReservationRequestProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationRequestProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationRequestProto.newBuilder()
      private Builder() {
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetNewReservationRequestProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationRequestProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationRequestProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationRequestProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationRequestProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationRequestProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationRequestProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationRequestProto(this);
        onBuilt();
        return result;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationRequestProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationRequestProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationRequestProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationRequestProto.getDefaultInstance()) return this;
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.GetNewReservationRequestProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.GetNewReservationRequestProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationRequestProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationRequestProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationRequestProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<GetNewReservationRequestProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<GetNewReservationRequestProto>() {
      @java.lang.Override
      public GetNewReservationRequestProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<GetNewReservationRequestProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<GetNewReservationRequestProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationRequestProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
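
  // GetNewReservationRequestProto is likewise field-free: the
  // getNewReservation call carries no request payload, and the interesting
  // data (the newly allocated reservation_id) travels back in the
  // GetNewReservationResponseProto defined next.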

  public interface GetNewReservationResponseProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.GetNewReservationResponseProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>optional .hadoop.yarn.ReservationIdProto reservation_id = 1;</code>
     * @return Whether the reservationId field is set.
     */
    boolean hasReservationId();
    /**
     * <code>optional .hadoop.yarn.ReservationIdProto reservation_id = 1;</code>
     * @return The reservationId.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto getReservationId();
    /**
     * <code>optional .hadoop.yarn.ReservationIdProto reservation_id = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProtoOrBuilder getReservationIdOrBuilder();
  }
  /**
   * Protobuf type {@code hadoop.yarn.GetNewReservationResponseProto}
   */
  public static final class GetNewReservationResponseProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.GetNewReservationResponseProto)
      GetNewReservationResponseProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use GetNewReservationResponseProto.newBuilder() to construct.
    private GetNewReservationResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private GetNewReservationResponseProto() {
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new GetNewReservationResponseProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetNewReservationResponseProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetNewReservationResponseProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationResponseProto.Builder.class);
    }

    private int bitField0_;
    public static final int RESERVATION_ID_FIELD_NUMBER = 1;
    private org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto reservationId_;
    /**
     * <code>optional .hadoop.yarn.ReservationIdProto reservation_id = 1;</code>
     * @return Whether the reservationId field is set.
     */
    @java.lang.Override
    public boolean hasReservationId() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.ReservationIdProto reservation_id = 1;</code>
     * @return The reservationId.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto getReservationId() {
      return reservationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.getDefaultInstance() : reservationId_;
    }
    /**
     * <code>optional .hadoop.yarn.ReservationIdProto reservation_id = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProtoOrBuilder getReservationIdOrBuilder() {
      return reservationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.getDefaultInstance() : reservationId_;
    }
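
    // getReservationId() falls back to ReservationIdProto.getDefaultInstance()
    // when the field is unset, so callers should gate on hasReservationId();
    // a sketch (the ReservationIdProto accessors are assumed from
    // yarn_protos.proto):
    //
    //   if (response.hasReservationId()) {
    //     org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto id =
    //         response.getReservationId();
    //     long cluster = id.getClusterTimestamp();
    //     long seq = id.getId();
    //   }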

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeMessage(1, getReservationId());
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, getReservationId());
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationResponseProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationResponseProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationResponseProto) obj;

      if (hasReservationId() != other.hasReservationId()) return false;
      if (hasReservationId()) {
        if (!getReservationId()
            .equals(other.getReservationId())) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasReservationId()) {
        hash = (37 * hash) + RESERVATION_ID_FIELD_NUMBER;
        hash = (53 * hash) + getReservationId().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationResponseProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationResponseProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationResponseProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationResponseProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationResponseProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationResponseProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationResponseProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationResponseProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationResponseProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.GetNewReservationResponseProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.GetNewReservationResponseProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationResponseProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetNewReservationResponseProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetNewReservationResponseProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationResponseProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationResponseProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
          getReservationIdFieldBuilder();
        }
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        reservationId_ = null;
        if (reservationIdBuilder_ != null) {
          reservationIdBuilder_.dispose();
          reservationIdBuilder_ = null;
        }
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetNewReservationResponseProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationResponseProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationResponseProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationResponseProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationResponseProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationResponseProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationResponseProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationResponseProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationResponseProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.reservationId_ = reservationIdBuilder_ == null
              ? reservationId_
              : reservationIdBuilder_.build();
          to_bitField0_ |= 0x00000001;
        }
        result.bitField0_ |= to_bitField0_;
      }
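
      // buildPartial0 reads the reservation id from whichever holder is live
      // (the plain field when no builder exists, otherwise the field builder)
      // and transfers the has-bit from the builder's bitField0_ into the
      // message's, so hasReservationId() on the built message mirrors the
      // builder state.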

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationResponseProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationResponseProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationResponseProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationResponseProto.getDefaultInstance()) return this;
        if (other.hasReservationId()) {
          mergeReservationId(other.getReservationId());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
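              // Tag layout: (field_number << 3) | wire_type. 10 == (1 << 3) | 2,
              // i.e. field 1 (reservation_id), length-delimited.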
              case 10: {
                input.readMessage(
                    getReservationIdFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

      private org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto reservationId_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProtoOrBuilder> reservationIdBuilder_;
      /**
       * <code>optional .hadoop.yarn.ReservationIdProto reservation_id = 1;</code>
       * @return Whether the reservationId field is set.
       */
      public boolean hasReservationId() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.ReservationIdProto reservation_id = 1;</code>
       * @return The reservationId.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto getReservationId() {
        if (reservationIdBuilder_ == null) {
          return reservationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.getDefaultInstance() : reservationId_;
        } else {
          return reservationIdBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.ReservationIdProto reservation_id = 1;</code>
       */
      public Builder setReservationId(org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto value) {
        if (reservationIdBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          reservationId_ = value;
        } else {
          reservationIdBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ReservationIdProto reservation_id = 1;</code>
       */
      public Builder setReservationId(
          org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.Builder builderForValue) {
        if (reservationIdBuilder_ == null) {
          reservationId_ = builderForValue.build();
        } else {
          reservationIdBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ReservationIdProto reservation_id = 1;</code>
       */
      public Builder mergeReservationId(org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto value) {
        if (reservationIdBuilder_ == null) {
          if (((bitField0_ & 0x00000001) != 0) &&
            reservationId_ != null &&
            reservationId_ != org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.getDefaultInstance()) {
            getReservationIdBuilder().mergeFrom(value);
          } else {
            reservationId_ = value;
          }
        } else {
          reservationIdBuilder_.mergeFrom(value);
        }
        if (reservationId_ != null) {
          bitField0_ |= 0x00000001;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ReservationIdProto reservation_id = 1;</code>
       */
      public Builder clearReservationId() {
        bitField0_ = (bitField0_ & ~0x00000001);
        reservationId_ = null;
        if (reservationIdBuilder_ != null) {
          reservationIdBuilder_.dispose();
          reservationIdBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ReservationIdProto reservation_id = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.Builder getReservationIdBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getReservationIdFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.ReservationIdProto reservation_id = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProtoOrBuilder getReservationIdOrBuilder() {
        if (reservationIdBuilder_ != null) {
          return reservationIdBuilder_.getMessageOrBuilder();
        } else {
          return reservationId_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.getDefaultInstance() : reservationId_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.ReservationIdProto reservation_id = 1;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProtoOrBuilder> 
          getReservationIdFieldBuilder() {
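        // Lazy init: once the nested builder exists, reservationId_ is cleared and
        // the SingleFieldBuilderV3 becomes the single source of truth for the field.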
        if (reservationIdBuilder_ == null) {
          reservationIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProtoOrBuilder>(
                  getReservationId(),
                  getParentForChildren(),
                  isClean());
          reservationId_ = null;
        }
        return reservationIdBuilder_;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.GetNewReservationResponseProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.GetNewReservationResponseProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationResponseProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationResponseProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationResponseProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<GetNewReservationResponseProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<GetNewReservationResponseProto>() {
      @java.lang.Override
      public GetNewReservationResponseProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
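        // If parsing fails partway through, the thrown exception carries the
        // partially built message so callers can inspect what was decoded.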
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<GetNewReservationResponseProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<GetNewReservationResponseProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationResponseProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
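  // Illustrative sketch, not generated code: building and round-tripping this
  // message. The ReservationIdProto setters used below (setId, setClusterTimestamp)
  // are assumed from its declaration in yarn_protos.proto, and the values are
  // hypothetical.
  //
  //   YarnServiceProtos.GetNewReservationResponseProto response =
  //       YarnServiceProtos.GetNewReservationResponseProto.newBuilder()
  //           .setReservationId(
  //               org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.newBuilder()
  //                   .setId(42L)                          // hypothetical id
  //                   .setClusterTimestamp(1700000000000L) // hypothetical timestamp
  //                   .build())
  //           .build();
  //   byte[] wire = response.toByteArray();
  //   YarnServiceProtos.GetNewReservationResponseProto parsed =
  //       YarnServiceProtos.GetNewReservationResponseProto.parseFrom(wire);
  //   assert parsed.hasReservationId();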

  public interface ReservationSubmissionRequestProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.ReservationSubmissionRequestProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>optional string queue = 1;</code>
     * @return Whether the queue field is set.
     */
    boolean hasQueue();
    /**
     * <code>optional string queue = 1;</code>
     * @return The queue.
     */
    java.lang.String getQueue();
    /**
     * <code>optional string queue = 1;</code>
     * @return The bytes for queue.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getQueueBytes();

    /**
     * <code>optional .hadoop.yarn.ReservationDefinitionProto reservation_definition = 2;</code>
     * @return Whether the reservationDefinition field is set.
     */
    boolean hasReservationDefinition();
    /**
     * <code>optional .hadoop.yarn.ReservationDefinitionProto reservation_definition = 2;</code>
     * @return The reservationDefinition.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto getReservationDefinition();
    /**
     * <code>optional .hadoop.yarn.ReservationDefinitionProto reservation_definition = 2;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProtoOrBuilder getReservationDefinitionOrBuilder();

    /**
     * <code>optional .hadoop.yarn.ReservationIdProto reservation_id = 3;</code>
     * @return Whether the reservationId field is set.
     */
    boolean hasReservationId();
    /**
     * <code>optional .hadoop.yarn.ReservationIdProto reservation_id = 3;</code>
     * @return The reservationId.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto getReservationId();
    /**
     * <code>optional .hadoop.yarn.ReservationIdProto reservation_id = 3;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProtoOrBuilder getReservationIdOrBuilder();
  }
  /**
   * Protobuf type {@code hadoop.yarn.ReservationSubmissionRequestProto}
   */
  public static final class ReservationSubmissionRequestProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.ReservationSubmissionRequestProto)
      ReservationSubmissionRequestProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use ReservationSubmissionRequestProto.newBuilder() to construct.
    private ReservationSubmissionRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private ReservationSubmissionRequestProto() {
      queue_ = "";
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new ReservationSubmissionRequestProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReservationSubmissionRequestProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReservationSubmissionRequestProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionRequestProto.Builder.class);
    }

    private int bitField0_;
    public static final int QUEUE_FIELD_NUMBER = 1;
    @SuppressWarnings("serial")
    private volatile java.lang.Object queue_ = "";
    /**
     * <code>optional string queue = 1;</code>
     * @return Whether the queue field is set.
     */
    @java.lang.Override
    public boolean hasQueue() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional string queue = 1;</code>
     * @return The queue.
     */
    @java.lang.Override
    public java.lang.String getQueue() {
      java.lang.Object ref = queue_;
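      // queue_ holds either a decoded String or the raw ByteString off the wire;
      // decode lazily and cache the String only when the bytes are valid UTF-8.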
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          queue_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string queue = 1;</code>
     * @return The bytes for queue.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getQueueBytes() {
      java.lang.Object ref = queue_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        queue_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int RESERVATION_DEFINITION_FIELD_NUMBER = 2;
    private org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto reservationDefinition_;
    /**
     * <code>optional .hadoop.yarn.ReservationDefinitionProto reservation_definition = 2;</code>
     * @return Whether the reservationDefinition field is set.
     */
    @java.lang.Override
    public boolean hasReservationDefinition() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.ReservationDefinitionProto reservation_definition = 2;</code>
     * @return The reservationDefinition.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto getReservationDefinition() {
      return reservationDefinition_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto.getDefaultInstance() : reservationDefinition_;
    }
    /**
     * <code>optional .hadoop.yarn.ReservationDefinitionProto reservation_definition = 2;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProtoOrBuilder getReservationDefinitionOrBuilder() {
      return reservationDefinition_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto.getDefaultInstance() : reservationDefinition_;
    }

    public static final int RESERVATION_ID_FIELD_NUMBER = 3;
    private org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto reservationId_;
    /**
     * <code>optional .hadoop.yarn.ReservationIdProto reservation_id = 3;</code>
     * @return Whether the reservationId field is set.
     */
    @java.lang.Override
    public boolean hasReservationId() {
      return ((bitField0_ & 0x00000004) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.ReservationIdProto reservation_id = 3;</code>
     * @return The reservationId.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto getReservationId() {
      return reservationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.getDefaultInstance() : reservationId_;
    }
    /**
     * <code>optional .hadoop.yarn.ReservationIdProto reservation_id = 3;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProtoOrBuilder getReservationIdOrBuilder() {
      return reservationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.getDefaultInstance() : reservationId_;
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;
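      // -1 (the initial value) means "not yet computed": fall through, evaluate, and memoize.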

      if (hasReservationDefinition()) {
        if (!getReservationDefinition().isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, queue_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        output.writeMessage(2, getReservationDefinition());
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        output.writeMessage(3, getReservationId());
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, queue_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(2, getReservationDefinition());
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(3, getReservationId());
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionRequestProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionRequestProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionRequestProto) obj;

      if (hasQueue() != other.hasQueue()) return false;
      if (hasQueue()) {
        if (!getQueue()
            .equals(other.getQueue())) return false;
      }
      if (hasReservationDefinition() != other.hasReservationDefinition()) return false;
      if (hasReservationDefinition()) {
        if (!getReservationDefinition()
            .equals(other.getReservationDefinition())) return false;
      }
      if (hasReservationId() != other.hasReservationId()) return false;
      if (hasReservationId()) {
        if (!getReservationId()
            .equals(other.getReservationId())) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasQueue()) {
        hash = (37 * hash) + QUEUE_FIELD_NUMBER;
        hash = (53 * hash) + getQueue().hashCode();
      }
      if (hasReservationDefinition()) {
        hash = (37 * hash) + RESERVATION_DEFINITION_FIELD_NUMBER;
        hash = (53 * hash) + getReservationDefinition().hashCode();
      }
      if (hasReservationId()) {
        hash = (37 * hash) + RESERVATION_ID_FIELD_NUMBER;
        hash = (53 * hash) + getReservationId().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionRequestProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionRequestProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionRequestProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionRequestProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionRequestProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionRequestProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionRequestProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionRequestProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionRequestProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.ReservationSubmissionRequestProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.ReservationSubmissionRequestProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionRequestProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReservationSubmissionRequestProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReservationSubmissionRequestProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionRequestProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionRequestProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
          getReservationDefinitionFieldBuilder();
          getReservationIdFieldBuilder();
        }
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        queue_ = "";
        reservationDefinition_ = null;
        if (reservationDefinitionBuilder_ != null) {
          reservationDefinitionBuilder_.dispose();
          reservationDefinitionBuilder_ = null;
        }
        reservationId_ = null;
        if (reservationIdBuilder_ != null) {
          reservationIdBuilder_.dispose();
          reservationIdBuilder_ = null;
        }
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReservationSubmissionRequestProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionRequestProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionRequestProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionRequestProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionRequestProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionRequestProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionRequestProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionRequestProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionRequestProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.queue_ = queue_;
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.reservationDefinition_ = reservationDefinitionBuilder_ == null
              ? reservationDefinition_
              : reservationDefinitionBuilder_.build();
          to_bitField0_ |= 0x00000002;
        }
        if (((from_bitField0_ & 0x00000004) != 0)) {
          result.reservationId_ = reservationIdBuilder_ == null
              ? reservationId_
              : reservationIdBuilder_.build();
          to_bitField0_ |= 0x00000004;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionRequestProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionRequestProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionRequestProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionRequestProto.getDefaultInstance()) return this;
        if (other.hasQueue()) {
          queue_ = other.queue_;
          bitField0_ |= 0x00000001;
          onChanged();
        }
        if (other.hasReservationDefinition()) {
          mergeReservationDefinition(other.getReservationDefinition());
        }
        if (other.hasReservationId()) {
          mergeReservationId(other.getReservationId());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        if (hasReservationDefinition()) {
          if (!getReservationDefinition().isInitialized()) {
            return false;
          }
        }
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                queue_ = input.readBytes();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
              case 18: {
                input.readMessage(
                    getReservationDefinitionFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
              case 26: {
                input.readMessage(
                    getReservationIdFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000004;
                break;
              } // case 26
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

      private java.lang.Object queue_ = "";
      /**
       * <code>optional string queue = 1;</code>
       * @return Whether the queue field is set.
       */
      public boolean hasQueue() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>optional string queue = 1;</code>
       * @return The queue.
       */
      public java.lang.String getQueue() {
        java.lang.Object ref = queue_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            queue_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string queue = 1;</code>
       * @return The bytes for queue.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getQueueBytes() {
        java.lang.Object ref = queue_;
        if (ref instanceof java.lang.String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          queue_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string queue = 1;</code>
       * @param value The queue to set.
       * @return This builder for chaining.
       */
      public Builder setQueue(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        queue_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional string queue = 1;</code>
       * @return This builder for chaining.
       */
      public Builder clearQueue() {
        queue_ = getDefaultInstance().getQueue();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
        return this;
      }
      /**
       * <code>optional string queue = 1;</code>
       * @param value The bytes for queue to set.
       * @return This builder for chaining.
       */
      public Builder setQueueBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        queue_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }

      private org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto reservationDefinition_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto, org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProtoOrBuilder> reservationDefinitionBuilder_;
      /**
       * <code>optional .hadoop.yarn.ReservationDefinitionProto reservation_definition = 2;</code>
       * @return Whether the reservationDefinition field is set.
       */
      public boolean hasReservationDefinition() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.ReservationDefinitionProto reservation_definition = 2;</code>
       * @return The reservationDefinition.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto getReservationDefinition() {
        if (reservationDefinitionBuilder_ == null) {
          return reservationDefinition_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto.getDefaultInstance() : reservationDefinition_;
        } else {
          return reservationDefinitionBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.ReservationDefinitionProto reservation_definition = 2;</code>
       */
      public Builder setReservationDefinition(org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto value) {
        if (reservationDefinitionBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          reservationDefinition_ = value;
        } else {
          reservationDefinitionBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ReservationDefinitionProto reservation_definition = 2;</code>
       */
      public Builder setReservationDefinition(
          org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto.Builder builderForValue) {
        if (reservationDefinitionBuilder_ == null) {
          reservationDefinition_ = builderForValue.build();
        } else {
          reservationDefinitionBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ReservationDefinitionProto reservation_definition = 2;</code>
       */
      public Builder mergeReservationDefinition(org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto value) {
        if (reservationDefinitionBuilder_ == null) {
          if (((bitField0_ & 0x00000002) != 0) &&
            reservationDefinition_ != null &&
            reservationDefinition_ != org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto.getDefaultInstance()) {
            getReservationDefinitionBuilder().mergeFrom(value);
          } else {
            reservationDefinition_ = value;
          }
        } else {
          reservationDefinitionBuilder_.mergeFrom(value);
        }
        if (reservationDefinition_ != null) {
          bitField0_ |= 0x00000002;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ReservationDefinitionProto reservation_definition = 2;</code>
       */
      public Builder clearReservationDefinition() {
        bitField0_ = (bitField0_ & ~0x00000002);
        reservationDefinition_ = null;
        if (reservationDefinitionBuilder_ != null) {
          reservationDefinitionBuilder_.dispose();
          reservationDefinitionBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ReservationDefinitionProto reservation_definition = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto.Builder getReservationDefinitionBuilder() {
        bitField0_ |= 0x00000002;
        onChanged();
        return getReservationDefinitionFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.ReservationDefinitionProto reservation_definition = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProtoOrBuilder getReservationDefinitionOrBuilder() {
        if (reservationDefinitionBuilder_ != null) {
          return reservationDefinitionBuilder_.getMessageOrBuilder();
        } else {
          return reservationDefinition_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto.getDefaultInstance() : reservationDefinition_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.ReservationDefinitionProto reservation_definition = 2;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto, org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProtoOrBuilder> 
          getReservationDefinitionFieldBuilder() {
        if (reservationDefinitionBuilder_ == null) {
          reservationDefinitionBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto, org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProtoOrBuilder>(
                  getReservationDefinition(),
                  getParentForChildren(),
                  isClean());
          reservationDefinition_ = null;
        }
        return reservationDefinitionBuilder_;
      }

      private org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto reservationId_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProtoOrBuilder> reservationIdBuilder_;
      /**
       * <code>optional .hadoop.yarn.ReservationIdProto reservation_id = 3;</code>
       * @return Whether the reservationId field is set.
       */
      public boolean hasReservationId() {
        return ((bitField0_ & 0x00000004) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.ReservationIdProto reservation_id = 3;</code>
       * @return The reservationId.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto getReservationId() {
        if (reservationIdBuilder_ == null) {
          return reservationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.getDefaultInstance() : reservationId_;
        } else {
          return reservationIdBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.ReservationIdProto reservation_id = 3;</code>
       */
      public Builder setReservationId(org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto value) {
        if (reservationIdBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          reservationId_ = value;
        } else {
          reservationIdBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000004;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ReservationIdProto reservation_id = 3;</code>
       */
      public Builder setReservationId(
          org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.Builder builderForValue) {
        if (reservationIdBuilder_ == null) {
          reservationId_ = builderForValue.build();
        } else {
          reservationIdBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000004;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ReservationIdProto reservation_id = 3;</code>
       */
      public Builder mergeReservationId(org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto value) {
        if (reservationIdBuilder_ == null) {
          if (((bitField0_ & 0x00000004) != 0) &&
            reservationId_ != null &&
            reservationId_ != org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.getDefaultInstance()) {
            getReservationIdBuilder().mergeFrom(value);
          } else {
            reservationId_ = value;
          }
        } else {
          reservationIdBuilder_.mergeFrom(value);
        }
        if (reservationId_ != null) {
          bitField0_ |= 0x00000004;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ReservationIdProto reservation_id = 3;</code>
       */
      public Builder clearReservationId() {
        bitField0_ = (bitField0_ & ~0x00000004);
        reservationId_ = null;
        if (reservationIdBuilder_ != null) {
          reservationIdBuilder_.dispose();
          reservationIdBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ReservationIdProto reservation_id = 3;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.Builder getReservationIdBuilder() {
        bitField0_ |= 0x00000004;
        onChanged();
        return getReservationIdFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.ReservationIdProto reservation_id = 3;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProtoOrBuilder getReservationIdOrBuilder() {
        if (reservationIdBuilder_ != null) {
          return reservationIdBuilder_.getMessageOrBuilder();
        } else {
          return reservationId_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.getDefaultInstance() : reservationId_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.ReservationIdProto reservation_id = 3;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProtoOrBuilder> 
          getReservationIdFieldBuilder() {
        if (reservationIdBuilder_ == null) {
          reservationIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProtoOrBuilder>(
                  getReservationId(),
                  getParentForChildren(),
                  isClean());
          reservationId_ = null;
        }
        return reservationIdBuilder_;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.ReservationSubmissionRequestProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.ReservationSubmissionRequestProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionRequestProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionRequestProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionRequestProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<ReservationSubmissionRequestProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<ReservationSubmissionRequestProto>() {
      @java.lang.Override
      public ReservationSubmissionRequestProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<ReservationSubmissionRequestProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<ReservationSubmissionRequestProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionRequestProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
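  // Illustrative sketch, not generated code: populating a submission request.
  // Only setters defined by the Builder above are used; the queue name is
  // hypothetical, and the ReservationDefinitionProto is assumed to be built
  // elsewhere (its own fields are out of scope here).
  //
  //   org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto definition = ...;
  //   YarnServiceProtos.ReservationSubmissionRequestProto request =
  //       YarnServiceProtos.ReservationSubmissionRequestProto.newBuilder()
  //           .setQueue("default")
  //           .setReservationDefinition(definition)
  //           .build();
  //
  // Note that build() delegates to isInitialized(), which in turn requires any
  // set reservation_definition to be initialized itself (see isInitialized above).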

  public interface ReservationSubmissionResponseProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.ReservationSubmissionResponseProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
  }
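  // Note: ReservationSubmissionResponseProto declares no fields of its own; it
  // serves as an empty acknowledgement and carries only unknown-field passthrough.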
  /**
   * Protobuf type {@code hadoop.yarn.ReservationSubmissionResponseProto}
   */
  public static final class ReservationSubmissionResponseProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.ReservationSubmissionResponseProto)
      ReservationSubmissionResponseProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use ReservationSubmissionResponseProto.newBuilder() to construct.
    private ReservationSubmissionResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private ReservationSubmissionResponseProto() {
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new ReservationSubmissionResponseProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReservationSubmissionResponseProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReservationSubmissionResponseProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionResponseProto.Builder.class);
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionResponseProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionResponseProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionResponseProto) obj;

      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionResponseProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionResponseProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionResponseProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionResponseProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionResponseProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionResponseProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionResponseProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionResponseProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionResponseProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.ReservationSubmissionResponseProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.ReservationSubmissionResponseProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionResponseProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReservationSubmissionResponseProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReservationSubmissionResponseProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionResponseProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionResponseProto.newBuilder()
      private Builder() {
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReservationSubmissionResponseProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionResponseProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionResponseProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionResponseProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionResponseProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionResponseProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionResponseProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionResponseProto(this);
        onBuilt();
        return result;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionResponseProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionResponseProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionResponseProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionResponseProto.getDefaultInstance()) return this;
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.ReservationSubmissionResponseProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.ReservationSubmissionResponseProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionResponseProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionResponseProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionResponseProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<ReservationSubmissionResponseProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<ReservationSubmissionResponseProto>() {
      @java.lang.Override
      public ReservationSubmissionResponseProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
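    // Editorial note: direct use of the public PARSER field is deprecated in
    // generated protobuf code; callers are expected to go through parser() or
    // getParserForType() below instead.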

    public static org.apache.hadoop.thirdparty.protobuf.Parser<ReservationSubmissionResponseProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<ReservationSubmissionResponseProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionResponseProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
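  /*
   * Usage sketch (editorial, not part of the generated API): as generated
   * here, ReservationSubmissionResponseProto declares no fields of its own,
   * so a serialization round trip only exercises the unknown-field machinery.
   *
   *   YarnServiceProtos.ReservationSubmissionResponseProto resp =
   *       YarnServiceProtos.ReservationSubmissionResponseProto.getDefaultInstance();
   *   byte[] bytes = resp.toByteArray();  // empty payload for an empty message
   *   YarnServiceProtos.ReservationSubmissionResponseProto parsed =
   *       YarnServiceProtos.ReservationSubmissionResponseProto.parseFrom(bytes);
   *   assert parsed.equals(resp);         // equals() compares unknown fields only
   */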

  public interface ReservationUpdateRequestProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.ReservationUpdateRequestProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>optional .hadoop.yarn.ReservationDefinitionProto reservation_definition = 1;</code>
     * @return Whether the reservationDefinition field is set.
     */
    boolean hasReservationDefinition();
    /**
     * <code>optional .hadoop.yarn.ReservationDefinitionProto reservation_definition = 1;</code>
     * @return The reservationDefinition.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto getReservationDefinition();
    /**
     * <code>optional .hadoop.yarn.ReservationDefinitionProto reservation_definition = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProtoOrBuilder getReservationDefinitionOrBuilder();

    /**
     * <code>optional .hadoop.yarn.ReservationIdProto reservation_id = 2;</code>
     * @return Whether the reservationId field is set.
     */
    boolean hasReservationId();
    /**
     * <code>optional .hadoop.yarn.ReservationIdProto reservation_id = 2;</code>
     * @return The reservationId.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto getReservationId();
    /**
     * <code>optional .hadoop.yarn.ReservationIdProto reservation_id = 2;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProtoOrBuilder getReservationIdOrBuilder();
  }
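  /*
   * Presence semantics (editorial note): both fields above are proto2
   * "optional", so callers should check hasReservationDefinition() /
   * hasReservationId() before relying on the getters; an unset getter returns
   * the field type's default instance, never null:
   *
   *   if (req.hasReservationId()) {
   *     YarnProtos.ReservationIdProto id = req.getReservationId();
   *     // ... use id ...
   *   }
   */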
  /**
   * Protobuf type {@code hadoop.yarn.ReservationUpdateRequestProto}
   */
  public static final class ReservationUpdateRequestProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.ReservationUpdateRequestProto)
      ReservationUpdateRequestProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use ReservationUpdateRequestProto.newBuilder() to construct.
    private ReservationUpdateRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private ReservationUpdateRequestProto() {
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new ReservationUpdateRequestProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReservationUpdateRequestProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReservationUpdateRequestProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateRequestProto.Builder.class);
    }

    // Presence bits: 0x1 = reservation_definition, 0x2 = reservation_id.
    private int bitField0_;
    public static final int RESERVATION_DEFINITION_FIELD_NUMBER = 1;
    private org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto reservationDefinition_;
    /**
     * <code>optional .hadoop.yarn.ReservationDefinitionProto reservation_definition = 1;</code>
     * @return Whether the reservationDefinition field is set.
     */
    @java.lang.Override
    public boolean hasReservationDefinition() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.ReservationDefinitionProto reservation_definition = 1;</code>
     * @return The reservationDefinition.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto getReservationDefinition() {
      return reservationDefinition_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto.getDefaultInstance() : reservationDefinition_;
    }
    /**
     * <code>optional .hadoop.yarn.ReservationDefinitionProto reservation_definition = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProtoOrBuilder getReservationDefinitionOrBuilder() {
      return reservationDefinition_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto.getDefaultInstance() : reservationDefinition_;
    }

    public static final int RESERVATION_ID_FIELD_NUMBER = 2;
    private org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto reservationId_;
    /**
     * <code>optional .hadoop.yarn.ReservationIdProto reservation_id = 2;</code>
     * @return Whether the reservationId field is set.
     */
    @java.lang.Override
    public boolean hasReservationId() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.ReservationIdProto reservation_id = 2;</code>
     * @return The reservationId.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto getReservationId() {
      return reservationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.getDefaultInstance() : reservationId_;
    }
    /**
     * <code>optional .hadoop.yarn.ReservationIdProto reservation_id = 2;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProtoOrBuilder getReservationIdOrBuilder() {
      return reservationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.getDefaultInstance() : reservationId_;
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      if (hasReservationDefinition()) {
        if (!getReservationDefinition().isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeMessage(1, getReservationDefinition());
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        output.writeMessage(2, getReservationId());
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, getReservationDefinition());
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(2, getReservationId());
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateRequestProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateRequestProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateRequestProto) obj;

      if (hasReservationDefinition() != other.hasReservationDefinition()) return false;
      if (hasReservationDefinition()) {
        if (!getReservationDefinition()
            .equals(other.getReservationDefinition())) return false;
      }
      if (hasReservationId() != other.hasReservationId()) return false;
      if (hasReservationId()) {
        if (!getReservationId()
            .equals(other.getReservationId())) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasReservationDefinition()) {
        hash = (37 * hash) + RESERVATION_DEFINITION_FIELD_NUMBER;
        hash = (53 * hash) + getReservationDefinition().hashCode();
      }
      if (hasReservationId()) {
        hash = (37 * hash) + RESERVATION_ID_FIELD_NUMBER;
        hash = (53 * hash) + getReservationId().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateRequestProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateRequestProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateRequestProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateRequestProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateRequestProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateRequestProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateRequestProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateRequestProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateRequestProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.ReservationUpdateRequestProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.ReservationUpdateRequestProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateRequestProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReservationUpdateRequestProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReservationUpdateRequestProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateRequestProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateRequestProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
          getReservationDefinitionFieldBuilder();
          getReservationIdFieldBuilder();
        }
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        reservationDefinition_ = null;
        if (reservationDefinitionBuilder_ != null) {
          reservationDefinitionBuilder_.dispose();
          reservationDefinitionBuilder_ = null;
        }
        reservationId_ = null;
        if (reservationIdBuilder_ != null) {
          reservationIdBuilder_.dispose();
          reservationIdBuilder_ = null;
        }
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReservationUpdateRequestProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateRequestProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateRequestProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateRequestProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateRequestProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateRequestProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateRequestProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateRequestProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateRequestProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.reservationDefinition_ = reservationDefinitionBuilder_ == null
              ? reservationDefinition_
              : reservationDefinitionBuilder_.build();
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.reservationId_ = reservationIdBuilder_ == null
              ? reservationId_
              : reservationIdBuilder_.build();
          to_bitField0_ |= 0x00000002;
        }
        result.bitField0_ |= to_bitField0_;
      }
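      // Editorial note: buildPartial0 copies each set message field out of the
      // builder (or out of its nested SingleFieldBuilderV3, if one was
      // materialized) and mirrors the builder's presence bits into
      // result.bitField0_, so hasX() on the built message reports exactly what
      // was set here.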

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateRequestProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateRequestProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateRequestProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateRequestProto.getDefaultInstance()) return this;
        if (other.hasReservationDefinition()) {
          mergeReservationDefinition(other.getReservationDefinition());
        }
        if (other.hasReservationId()) {
          mergeReservationId(other.getReservationId());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        if (hasReservationDefinition()) {
          if (!getReservationDefinition().isInitialized()) {
            return false;
          }
        }
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                input.readMessage(
                    getReservationDefinitionFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
              case 18: {
                input.readMessage(
                    getReservationIdFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
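      // Editorial note: the case labels above are protobuf wire tags, computed
      // as (field_number << 3) | wire_type. Embedded messages use wire type 2,
      // so field 1 yields tag (1 << 3) | 2 == 10 and field 2 yields 18; tag 0
      // signals end of input.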
      private int bitField0_;

      private org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto reservationDefinition_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto, org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProtoOrBuilder> reservationDefinitionBuilder_;
      /**
       * <code>optional .hadoop.yarn.ReservationDefinitionProto reservation_definition = 1;</code>
       * @return Whether the reservationDefinition field is set.
       */
      public boolean hasReservationDefinition() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.ReservationDefinitionProto reservation_definition = 1;</code>
       * @return The reservationDefinition.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto getReservationDefinition() {
        if (reservationDefinitionBuilder_ == null) {
          return reservationDefinition_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto.getDefaultInstance() : reservationDefinition_;
        } else {
          return reservationDefinitionBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.ReservationDefinitionProto reservation_definition = 1;</code>
       */
      public Builder setReservationDefinition(org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto value) {
        if (reservationDefinitionBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          reservationDefinition_ = value;
        } else {
          reservationDefinitionBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ReservationDefinitionProto reservation_definition = 1;</code>
       */
      public Builder setReservationDefinition(
          org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto.Builder builderForValue) {
        if (reservationDefinitionBuilder_ == null) {
          reservationDefinition_ = builderForValue.build();
        } else {
          reservationDefinitionBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ReservationDefinitionProto reservation_definition = 1;</code>
       */
      public Builder mergeReservationDefinition(org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto value) {
        if (reservationDefinitionBuilder_ == null) {
          if (((bitField0_ & 0x00000001) != 0) &&
            reservationDefinition_ != null &&
            reservationDefinition_ != org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto.getDefaultInstance()) {
            getReservationDefinitionBuilder().mergeFrom(value);
          } else {
            reservationDefinition_ = value;
          }
        } else {
          reservationDefinitionBuilder_.mergeFrom(value);
        }
        if (reservationDefinition_ != null) {
          bitField0_ |= 0x00000001;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ReservationDefinitionProto reservation_definition = 1;</code>
       */
      public Builder clearReservationDefinition() {
        bitField0_ = (bitField0_ & ~0x00000001);
        reservationDefinition_ = null;
        if (reservationDefinitionBuilder_ != null) {
          reservationDefinitionBuilder_.dispose();
          reservationDefinitionBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ReservationDefinitionProto reservation_definition = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto.Builder getReservationDefinitionBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getReservationDefinitionFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.ReservationDefinitionProto reservation_definition = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProtoOrBuilder getReservationDefinitionOrBuilder() {
        if (reservationDefinitionBuilder_ != null) {
          return reservationDefinitionBuilder_.getMessageOrBuilder();
        } else {
          return reservationDefinition_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto.getDefaultInstance() : reservationDefinition_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.ReservationDefinitionProto reservation_definition = 1;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto, org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProtoOrBuilder> 
          getReservationDefinitionFieldBuilder() {
        if (reservationDefinitionBuilder_ == null) {
          reservationDefinitionBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto, org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProtoOrBuilder>(
                  getReservationDefinition(),
                  getParentForChildren(),
                  isClean());
          reservationDefinition_ = null;
        }
        return reservationDefinitionBuilder_;
      }
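      // Editorial note: the SingleFieldBuilderV3 is created lazily on first
      // access; once it exists it owns the field's state, which is why
      // reservationDefinition_ is nulled here and why the accessors above
      // branch on whether the builder is null. The same pattern applies to
      // reservationIdBuilder_ below.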

      private org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto reservationId_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProtoOrBuilder> reservationIdBuilder_;
      /**
       * <code>optional .hadoop.yarn.ReservationIdProto reservation_id = 2;</code>
       * @return Whether the reservationId field is set.
       */
      public boolean hasReservationId() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.ReservationIdProto reservation_id = 2;</code>
       * @return The reservationId.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto getReservationId() {
        if (reservationIdBuilder_ == null) {
          return reservationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.getDefaultInstance() : reservationId_;
        } else {
          return reservationIdBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.ReservationIdProto reservation_id = 2;</code>
       */
      public Builder setReservationId(org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto value) {
        if (reservationIdBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          reservationId_ = value;
        } else {
          reservationIdBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ReservationIdProto reservation_id = 2;</code>
       */
      public Builder setReservationId(
          org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.Builder builderForValue) {
        if (reservationIdBuilder_ == null) {
          reservationId_ = builderForValue.build();
        } else {
          reservationIdBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ReservationIdProto reservation_id = 2;</code>
       */
      public Builder mergeReservationId(org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto value) {
        if (reservationIdBuilder_ == null) {
          if (((bitField0_ & 0x00000002) != 0) &&
            reservationId_ != null &&
            reservationId_ != org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.getDefaultInstance()) {
            getReservationIdBuilder().mergeFrom(value);
          } else {
            reservationId_ = value;
          }
        } else {
          reservationIdBuilder_.mergeFrom(value);
        }
        if (reservationId_ != null) {
          bitField0_ |= 0x00000002;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ReservationIdProto reservation_id = 2;</code>
       */
      public Builder clearReservationId() {
        bitField0_ = (bitField0_ & ~0x00000002);
        reservationId_ = null;
        if (reservationIdBuilder_ != null) {
          reservationIdBuilder_.dispose();
          reservationIdBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ReservationIdProto reservation_id = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.Builder getReservationIdBuilder() {
        bitField0_ |= 0x00000002;
        onChanged();
        return getReservationIdFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.ReservationIdProto reservation_id = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProtoOrBuilder getReservationIdOrBuilder() {
        if (reservationIdBuilder_ != null) {
          return reservationIdBuilder_.getMessageOrBuilder();
        } else {
          return reservationId_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.getDefaultInstance() : reservationId_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.ReservationIdProto reservation_id = 2;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProtoOrBuilder> 
          getReservationIdFieldBuilder() {
        if (reservationIdBuilder_ == null) {
          reservationIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProtoOrBuilder>(
                  getReservationId(),
                  getParentForChildren(),
                  isClean());
          reservationId_ = null;
        }
        return reservationIdBuilder_;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.ReservationUpdateRequestProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.ReservationUpdateRequestProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateRequestProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateRequestProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateRequestProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<ReservationUpdateRequestProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<ReservationUpdateRequestProto>() {
      @java.lang.Override
      public ReservationUpdateRequestProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<ReservationUpdateRequestProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<ReservationUpdateRequestProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateRequestProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
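  /*
   * Usage sketch (editorial, not part of the generated API): building an
   * update request. The ReservationIdProto setters shown (setId,
   * setClusterTimestamp) come from yarn_protos.proto and are assumptions
   * relative to this file:
   *
   *   YarnServiceProtos.ReservationUpdateRequestProto req =
   *       YarnServiceProtos.ReservationUpdateRequestProto.newBuilder()
   *           .setReservationId(
   *               YarnProtos.ReservationIdProto.newBuilder()
   *                   .setId(42L)
   *                   .setClusterTimestamp(System.currentTimeMillis())
   *                   .build())
   *           .build();
   *   // reservation_definition is optional, but when present it must itself
   *   // be initialized (see isInitialized() above).
   */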

  public interface ReservationUpdateResponseProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.ReservationUpdateResponseProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
  }
  /**
   * Protobuf type {@code hadoop.yarn.ReservationUpdateResponseProto}
   */
  public static final class ReservationUpdateResponseProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.ReservationUpdateResponseProto)
      ReservationUpdateResponseProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use ReservationUpdateResponseProto.newBuilder() to construct.
    private ReservationUpdateResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private ReservationUpdateResponseProto() {
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new ReservationUpdateResponseProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReservationUpdateResponseProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReservationUpdateResponseProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateResponseProto.Builder.class);
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateResponseProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateResponseProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateResponseProto) obj;

      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateResponseProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateResponseProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateResponseProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateResponseProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateResponseProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateResponseProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateResponseProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateResponseProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }
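    // The overloads above are the generated parse entry points for ByteBuffer,
    // ByteString, byte[], InputStream, and CodedInputStream sources. A minimal
    // sketch of deserialization (the "wire" buffer is an illustrative
    // placeholder; an empty buffer parses to the default instance):
    //
    //   byte[] wire = new byte[0];
    //   YarnServiceProtos.ReservationUpdateResponseProto resp =
    //       YarnServiceProtos.ReservationUpdateResponseProto.parseFrom(wire);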

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateResponseProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }
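    // toBuilder() skips the mergeFrom copy when invoked on the shared default
    // instance, handing out a fresh empty Builder instead.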

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.ReservationUpdateResponseProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.ReservationUpdateResponseProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateResponseProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReservationUpdateResponseProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReservationUpdateResponseProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateResponseProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateResponseProto.newBuilder()
      private Builder() {
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReservationUpdateResponseProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateResponseProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateResponseProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateResponseProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateResponseProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateResponseProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateResponseProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateResponseProto(this);
        onBuilt();
        return result;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateResponseProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateResponseProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateResponseProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateResponseProto.getDefaultInstance()) return this;
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
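      // The loop above is the generated wire-format reader: readTag() returns 0
      // at end of input and, since this message declares no fields, every other
      // tag is routed to parseUnknownField(), which retains it in the
      // UnknownFieldSet instead of dropping it.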
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.ReservationUpdateResponseProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.ReservationUpdateResponseProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateResponseProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateResponseProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateResponseProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<ReservationUpdateResponseProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<ReservationUpdateResponseProto>() {
      @java.lang.Override
      public ReservationUpdateResponseProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
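    // PARSER remains a public field only for backward compatibility (hence the
    // @Deprecated); callers should prefer parser() or the static parseFrom()
    // overloads above.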

    public static org.apache.hadoop.thirdparty.protobuf.Parser<ReservationUpdateResponseProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<ReservationUpdateResponseProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateResponseProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }

  public interface ReservationDeleteRequestProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.ReservationDeleteRequestProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>optional .hadoop.yarn.ReservationIdProto reservation_id = 1;</code>
     * @return Whether the reservationId field is set.
     */
    boolean hasReservationId();
    /**
     * <code>optional .hadoop.yarn.ReservationIdProto reservation_id = 1;</code>
     * @return The reservationId.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto getReservationId();
    /**
     * <code>optional .hadoop.yarn.ReservationIdProto reservation_id = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProtoOrBuilder getReservationIdOrBuilder();
  }
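  // The generated *OrBuilder interface is implemented by both the immutable
  // message and its Builder, so read-only code can accept either form. A
  // sketch (the helper name is illustrative, not part of the generated API):
  //
  //   static boolean hasId(ReservationDeleteRequestProtoOrBuilder r) {
  //     return r.hasReservationId();
  //   }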
  /**
   * Protobuf type {@code hadoop.yarn.ReservationDeleteRequestProto}
   */
  public static final class ReservationDeleteRequestProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.ReservationDeleteRequestProto)
      ReservationDeleteRequestProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use ReservationDeleteRequestProto.newBuilder() to construct.
    private ReservationDeleteRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private ReservationDeleteRequestProto() {
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new ReservationDeleteRequestProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReservationDeleteRequestProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReservationDeleteRequestProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteRequestProto.Builder.class);
    }

    private int bitField0_;
    public static final int RESERVATION_ID_FIELD_NUMBER = 1;
    private org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto reservationId_;
    /**
     * <code>optional .hadoop.yarn.ReservationIdProto reservation_id = 1;</code>
     * @return Whether the reservationId field is set.
     */
    @java.lang.Override
    public boolean hasReservationId() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.ReservationIdProto reservation_id = 1;</code>
     * @return The reservationId.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto getReservationId() {
      return reservationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.getDefaultInstance() : reservationId_;
    }
    /**
     * <code>optional .hadoop.yarn.ReservationIdProto reservation_id = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProtoOrBuilder getReservationIdOrBuilder() {
      return reservationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.getDefaultInstance() : reservationId_;
    }
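    // Generated accessor pattern for an optional submessage: hasReservationId()
    // consults bitField0_, while both getters fall back to the field type's
    // default instance when unset, so callers never observe null.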

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeMessage(1, getReservationId());
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, getReservationId());
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteRequestProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteRequestProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteRequestProto) obj;

      if (hasReservationId() != other.hasReservationId()) return false;
      if (hasReservationId()) {
        if (!getReservationId()
            .equals(other.getReservationId())) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasReservationId()) {
        hash = (37 * hash) + RESERVATION_ID_FIELD_NUMBER;
        hash = (53 * hash) + getReservationId().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
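    // hashCode() is memoized (0 is reserved to mean "not yet computed") and
    // folds in the descriptor, each present field keyed by its field number,
    // and the unknown fields, mirroring the generated equals() above.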

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteRequestProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteRequestProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteRequestProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteRequestProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteRequestProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteRequestProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteRequestProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteRequestProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteRequestProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.ReservationDeleteRequestProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.ReservationDeleteRequestProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteRequestProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReservationDeleteRequestProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReservationDeleteRequestProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteRequestProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteRequestProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
          getReservationIdFieldBuilder();
        }
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        reservationId_ = null;
        if (reservationIdBuilder_ != null) {
          reservationIdBuilder_.dispose();
          reservationIdBuilder_ = null;
        }
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReservationDeleteRequestProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteRequestProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteRequestProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteRequestProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteRequestProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteRequestProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteRequestProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteRequestProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteRequestProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.reservationId_ = reservationIdBuilder_ == null
              ? reservationId_
              : reservationIdBuilder_.build();
          to_bitField0_ |= 0x00000001;
        }
        result.bitField0_ |= to_bitField0_;
      }
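      // buildPartial0() copies only the has-bits that are set in the builder
      // into the result's bitField0_, taking the submessage from
      // reservationIdBuilder_ when one was materialized and from the plain
      // reservationId_ field otherwise.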

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteRequestProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteRequestProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteRequestProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteRequestProto.getDefaultInstance()) return this;
        if (other.hasReservationId()) {
          mergeReservationId(other.getReservationId());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                input.readMessage(
                    getReservationIdFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
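      // Tag 10 above decodes as (field_number 1 << 3) | wire_type 2
      // (length-delimited), i.e. the reservation_id submessage; any other
      // non-zero tag is preserved through parseUnknownField() for forward
      // compatibility.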
      private int bitField0_;

      private org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto reservationId_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProtoOrBuilder> reservationIdBuilder_;
      /**
       * <code>optional .hadoop.yarn.ReservationIdProto reservation_id = 1;</code>
       * @return Whether the reservationId field is set.
       */
      public boolean hasReservationId() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.ReservationIdProto reservation_id = 1;</code>
       * @return The reservationId.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto getReservationId() {
        if (reservationIdBuilder_ == null) {
          return reservationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.getDefaultInstance() : reservationId_;
        } else {
          return reservationIdBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.ReservationIdProto reservation_id = 1;</code>
       */
      public Builder setReservationId(org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto value) {
        if (reservationIdBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          reservationId_ = value;
        } else {
          reservationIdBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ReservationIdProto reservation_id = 1;</code>
       */
      public Builder setReservationId(
          org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.Builder builderForValue) {
        if (reservationIdBuilder_ == null) {
          reservationId_ = builderForValue.build();
        } else {
          reservationIdBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ReservationIdProto reservation_id = 1;</code>
       */
      public Builder mergeReservationId(org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto value) {
        if (reservationIdBuilder_ == null) {
          if (((bitField0_ & 0x00000001) != 0) &&
            reservationId_ != null &&
            reservationId_ != org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.getDefaultInstance()) {
            getReservationIdBuilder().mergeFrom(value);
          } else {
            reservationId_ = value;
          }
        } else {
          reservationIdBuilder_.mergeFrom(value);
        }
        if (reservationId_ != null) {
          bitField0_ |= 0x00000001;
          onChanged();
        }
        return this;
      }
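      // Merge semantics: when a reservation_id is already present, the incoming
      // value is field-merged into it rather than replacing it wholesale; when
      // unset, it simply fills the slot. A sketch (both arguments are
      // illustrative values):
      //
      //   builder.mergeReservationId(firstId);
      //   builder.mergeReservationId(secondId);  // merged field-by-field into firstId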
      /**
       * <code>optional .hadoop.yarn.ReservationIdProto reservation_id = 1;</code>
       */
      public Builder clearReservationId() {
        bitField0_ = (bitField0_ & ~0x00000001);
        reservationId_ = null;
        if (reservationIdBuilder_ != null) {
          reservationIdBuilder_.dispose();
          reservationIdBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ReservationIdProto reservation_id = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.Builder getReservationIdBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getReservationIdFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.ReservationIdProto reservation_id = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProtoOrBuilder getReservationIdOrBuilder() {
        if (reservationIdBuilder_ != null) {
          return reservationIdBuilder_.getMessageOrBuilder();
        } else {
          return reservationId_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.getDefaultInstance() : reservationId_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.ReservationIdProto reservation_id = 1;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProtoOrBuilder> 
          getReservationIdFieldBuilder() {
        if (reservationIdBuilder_ == null) {
          reservationIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProtoOrBuilder>(
                  getReservationId(),
                  getParentForChildren(),
                  isClean());
          reservationId_ = null;
        }
        return reservationIdBuilder_;
      }
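      // The SingleFieldBuilderV3 is created lazily on first use; once it
      // exists it owns the field's state, which is why reservationId_ is
      // nulled here and why the accessors above branch on reservationIdBuilder_.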
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.ReservationDeleteRequestProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.ReservationDeleteRequestProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteRequestProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteRequestProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteRequestProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<ReservationDeleteRequestProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<ReservationDeleteRequestProto>() {
      @java.lang.Override
      public ReservationDeleteRequestProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<ReservationDeleteRequestProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<ReservationDeleteRequestProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteRequestProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
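  // End-to-end sketch for the message above (the field values are illustrative,
  // and the setId/setClusterTimestamp setters are assumed from the generated
  // YarnProtos.ReservationIdProto builder, which is defined elsewhere):
  //
  //   YarnServiceProtos.ReservationDeleteRequestProto req =
  //       YarnServiceProtos.ReservationDeleteRequestProto.newBuilder()
  //           .setReservationId(
  //               YarnProtos.ReservationIdProto.newBuilder()
  //                   .setId(42L)
  //                   .setClusterTimestamp(1L)
  //                   .build())
  //           .build();
  //   byte[] wire = req.toByteArray();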

  public interface ReservationDeleteResponseProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.ReservationDeleteResponseProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
  }
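  // ReservationDeleteResponseProto declares no fields: it serves purely as a
  // typed acknowledgement, which is why the OrBuilder interface above is empty.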
  /**
   * Protobuf type {@code hadoop.yarn.ReservationDeleteResponseProto}
   */
  public static final class ReservationDeleteResponseProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.ReservationDeleteResponseProto)
      ReservationDeleteResponseProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use ReservationDeleteResponseProto.newBuilder() to construct.
    private ReservationDeleteResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private ReservationDeleteResponseProto() {
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new ReservationDeleteResponseProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReservationDeleteResponseProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReservationDeleteResponseProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteResponseProto.Builder.class);
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteResponseProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteResponseProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteResponseProto) obj;

      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteResponseProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteResponseProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteResponseProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteResponseProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteResponseProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteResponseProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteResponseProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteResponseProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteResponseProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.ReservationDeleteResponseProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.ReservationDeleteResponseProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteResponseProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReservationDeleteResponseProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReservationDeleteResponseProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteResponseProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteResponseProto.newBuilder()
      private Builder() {
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReservationDeleteResponseProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteResponseProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteResponseProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteResponseProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteResponseProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteResponseProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteResponseProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteResponseProto(this);
        onBuilt();
        return result;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteResponseProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteResponseProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteResponseProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteResponseProto.getDefaultInstance()) return this;
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.ReservationDeleteResponseProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.ReservationDeleteResponseProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteResponseProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteResponseProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteResponseProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<ReservationDeleteResponseProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<ReservationDeleteResponseProto>() {
      @java.lang.Override
      public ReservationDeleteResponseProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<ReservationDeleteResponseProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<ReservationDeleteResponseProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteResponseProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }

  public interface ReservationListRequestProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.ReservationListRequestProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>optional string queue = 1;</code>
     * @return Whether the queue field is set.
     */
    boolean hasQueue();
    /**
     * <code>optional string queue = 1;</code>
     * @return The queue.
     */
    java.lang.String getQueue();
    /**
     * <code>optional string queue = 1;</code>
     * @return The bytes for queue.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getQueueBytes();

    /**
     * <code>optional string reservation_id = 3;</code>
     * @return Whether the reservationId field is set.
     */
    boolean hasReservationId();
    /**
     * <code>optional string reservation_id = 3;</code>
     * @return The reservationId.
     */
    java.lang.String getReservationId();
    /**
     * <code>optional string reservation_id = 3;</code>
     * @return The bytes for reservationId.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getReservationIdBytes();

    /**
     * <code>optional int64 start_time = 4;</code>
     * @return Whether the startTime field is set.
     */
    boolean hasStartTime();
    /**
     * <code>optional int64 start_time = 4;</code>
     * @return The startTime.
     */
    long getStartTime();

    /**
     * <code>optional int64 end_time = 5;</code>
     * @return Whether the endTime field is set.
     */
    boolean hasEndTime();
    /**
     * <code>optional int64 end_time = 5;</code>
     * @return The endTime.
     */
    long getEndTime();

    /**
     * <code>optional bool include_resource_allocations = 6;</code>
     * @return Whether the includeResourceAllocations field is set.
     */
    boolean hasIncludeResourceAllocations();
    /**
     * <code>optional bool include_resource_allocations = 6;</code>
     * @return The includeResourceAllocations.
     */
    boolean getIncludeResourceAllocations();
  }
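  // Note the gap in the field numbers above (1, then 3 through 6): field
  // number 2 is unused in the current .proto (presumably removed or reserved),
  // and protobuf keeps the remaining numbers stable so existing wire data
  // still decodes correctly.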
  /**
   * Protobuf type {@code hadoop.yarn.ReservationListRequestProto}
   */
  public static final class ReservationListRequestProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.ReservationListRequestProto)
      ReservationListRequestProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use ReservationListRequestProto.newBuilder() to construct.
    private ReservationListRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private ReservationListRequestProto() {
      queue_ = "";
      reservationId_ = "";
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new ReservationListRequestProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReservationListRequestProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReservationListRequestProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListRequestProto.Builder.class);
    }

    private int bitField0_;
    public static final int QUEUE_FIELD_NUMBER = 1;
    @SuppressWarnings("serial")
    private volatile java.lang.Object queue_ = "";
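    // queue_ holds either a java.lang.String or a ByteString: the wire form is
    // bytes, and getQueue() below decodes UTF-8 lazily, caching the decoded
    // String only when the bytes are valid UTF-8.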
    /**
     * <code>optional string queue = 1;</code>
     * @return Whether the queue field is set.
     */
    @java.lang.Override
    public boolean hasQueue() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional string queue = 1;</code>
     * @return The queue.
     */
    @java.lang.Override
    public java.lang.String getQueue() {
      java.lang.Object ref = queue_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          queue_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string queue = 1;</code>
     * @return The bytes for queue.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getQueueBytes() {
      java.lang.Object ref = queue_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        queue_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int RESERVATION_ID_FIELD_NUMBER = 3;
    @SuppressWarnings("serial")
    private volatile java.lang.Object reservationId_ = "";
    /**
     * <code>optional string reservation_id = 3;</code>
     * @return Whether the reservationId field is set.
     */
    @java.lang.Override
    public boolean hasReservationId() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <code>optional string reservation_id = 3;</code>
     * @return The reservationId.
     */
    @java.lang.Override
    public java.lang.String getReservationId() {
      java.lang.Object ref = reservationId_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          reservationId_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string reservation_id = 3;</code>
     * @return The bytes for reservationId.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getReservationIdBytes() {
      java.lang.Object ref = reservationId_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        reservationId_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int START_TIME_FIELD_NUMBER = 4;
    private long startTime_ = 0L;
    /**
     * <code>optional int64 start_time = 4;</code>
     * @return Whether the startTime field is set.
     */
    @java.lang.Override
    public boolean hasStartTime() {
      return ((bitField0_ & 0x00000004) != 0);
    }
    /**
     * <code>optional int64 start_time = 4;</code>
     * @return The startTime.
     */
    @java.lang.Override
    public long getStartTime() {
      return startTime_;
    }

    public static final int END_TIME_FIELD_NUMBER = 5;
    private long endTime_ = 0L;
    /**
     * <code>optional int64 end_time = 5;</code>
     * @return Whether the endTime field is set.
     */
    @java.lang.Override
    public boolean hasEndTime() {
      return ((bitField0_ & 0x00000008) != 0);
    }
    /**
     * <code>optional int64 end_time = 5;</code>
     * @return The endTime.
     */
    @java.lang.Override
    public long getEndTime() {
      return endTime_;
    }

    public static final int INCLUDE_RESOURCE_ALLOCATIONS_FIELD_NUMBER = 6;
    private boolean includeResourceAllocations_ = false;
    /**
     * <code>optional bool include_resource_allocations = 6;</code>
     * @return Whether the includeResourceAllocations field is set.
     */
    @java.lang.Override
    public boolean hasIncludeResourceAllocations() {
      return ((bitField0_ & 0x00000010) != 0);
    }
    /**
     * <code>optional bool include_resource_allocations = 6;</code>
     * @return The includeResourceAllocations.
     */
    @java.lang.Override
    public boolean getIncludeResourceAllocations() {
      return includeResourceAllocations_;
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }
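    // memoizedIsInitialized encodes -1 = not yet computed, 0 = false, 1 = true;
    // this message declares no required fields, so the check above is trivially
    // true and cached immediately.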

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, queue_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 3, reservationId_);
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        output.writeInt64(4, startTime_);
      }
      if (((bitField0_ & 0x00000008) != 0)) {
        output.writeInt64(5, endTime_);
      }
      if (((bitField0_ & 0x00000010) != 0)) {
        output.writeBool(6, includeResourceAllocations_);
      }
      getUnknownFields().writeTo(output);
    }
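    // writeTo gates every field on its presence bit, so unset optional fields
    // emit no bytes at all; unknown fields captured during parsing are written
    // back out to preserve round-trip fidelity.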

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, queue_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(3, reservationId_);
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt64Size(4, startTime_);
      }
      if (((bitField0_ & 0x00000008) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt64Size(5, endTime_);
      }
      if (((bitField0_ & 0x00000010) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeBoolSize(6, includeResourceAllocations_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }
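    // The size computed above is cached in memoizedSize (sentinel -1 = not yet
    // computed), which is safe because the message is immutable once built.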

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListRequestProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListRequestProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListRequestProto) obj;

      if (hasQueue() != other.hasQueue()) return false;
      if (hasQueue()) {
        if (!getQueue()
            .equals(other.getQueue())) return false;
      }
      if (hasReservationId() != other.hasReservationId()) return false;
      if (hasReservationId()) {
        if (!getReservationId()
            .equals(other.getReservationId())) return false;
      }
      if (hasStartTime() != other.hasStartTime()) return false;
      if (hasStartTime()) {
        if (getStartTime()
            != other.getStartTime()) return false;
      }
      if (hasEndTime() != other.hasEndTime()) return false;
      if (hasEndTime()) {
        if (getEndTime()
            != other.getEndTime()) return false;
      }
      if (hasIncludeResourceAllocations() != other.hasIncludeResourceAllocations()) return false;
      if (hasIncludeResourceAllocations()) {
        if (getIncludeResourceAllocations()
            != other.getIncludeResourceAllocations()) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasQueue()) {
        hash = (37 * hash) + QUEUE_FIELD_NUMBER;
        hash = (53 * hash) + getQueue().hashCode();
      }
      if (hasReservationId()) {
        hash = (37 * hash) + RESERVATION_ID_FIELD_NUMBER;
        hash = (53 * hash) + getReservationId().hashCode();
      }
      if (hasStartTime()) {
        hash = (37 * hash) + START_TIME_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
            getStartTime());
      }
      if (hasEndTime()) {
        hash = (37 * hash) + END_TIME_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
            getEndTime());
      }
      if (hasIncludeResourceAllocations()) {
        hash = (37 * hash) + INCLUDE_RESOURCE_ALLOCATIONS_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashBoolean(
            getIncludeResourceAllocations());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
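    // The hash folds in the descriptor, then each set field as a
    // (field number, value) pair via the generator's fixed 37/53 multipliers,
    // then the unknown fields; 0 doubles as the "not yet computed" sentinel.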

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListRequestProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListRequestProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListRequestProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListRequestProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListRequestProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListRequestProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListRequestProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListRequestProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListRequestProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.ReservationListRequestProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.ReservationListRequestProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListRequestProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReservationListRequestProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReservationListRequestProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListRequestProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListRequestProto.newBuilder()
      private Builder() {

      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);

      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        queue_ = "";
        reservationId_ = "";
        startTime_ = 0L;
        endTime_ = 0L;
        includeResourceAllocations_ = false;
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReservationListRequestProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListRequestProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListRequestProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListRequestProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListRequestProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListRequestProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListRequestProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListRequestProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListRequestProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.queue_ = queue_;
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.reservationId_ = reservationId_;
          to_bitField0_ |= 0x00000002;
        }
        if (((from_bitField0_ & 0x00000004) != 0)) {
          result.startTime_ = startTime_;
          to_bitField0_ |= 0x00000004;
        }
        if (((from_bitField0_ & 0x00000008) != 0)) {
          result.endTime_ = endTime_;
          to_bitField0_ |= 0x00000008;
        }
        if (((from_bitField0_ & 0x00000010) != 0)) {
          result.includeResourceAllocations_ = includeResourceAllocations_;
          to_bitField0_ |= 0x00000010;
        }
        result.bitField0_ |= to_bitField0_;
      }
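      // buildPartial0 stages each set field's value and then commits all
      // presence bits to result.bitField0_ in a single OR, so the built message
      // never exposes a presence bit without its corresponding value.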

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListRequestProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListRequestProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListRequestProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListRequestProto.getDefaultInstance()) return this;
        if (other.hasQueue()) {
          queue_ = other.queue_;
          bitField0_ |= 0x00000001;
          onChanged();
        }
        if (other.hasReservationId()) {
          reservationId_ = other.reservationId_;
          bitField0_ |= 0x00000002;
          onChanged();
        }
        if (other.hasStartTime()) {
          setStartTime(other.getStartTime());
        }
        if (other.hasEndTime()) {
          setEndTime(other.getEndTime());
        }
        if (other.hasIncludeResourceAllocations()) {
          setIncludeResourceAllocations(other.getIncludeResourceAllocations());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                queue_ = input.readBytes();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
              case 26: {
                reservationId_ = input.readBytes();
                bitField0_ |= 0x00000002;
                break;
              } // case 26
              case 32: {
                startTime_ = input.readInt64();
                bitField0_ |= 0x00000004;
                break;
              } // case 32
              case 40: {
                endTime_ = input.readInt64();
                bitField0_ |= 0x00000008;
                break;
              } // case 40
              case 48: {
                includeResourceAllocations_ = input.readBool();
                bitField0_ |= 0x00000010;
                break;
              } // case 48
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
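      // Tag layout in the switch above: tag = (field_number << 3) | wire_type.
      // Hence 10 = queue (field 1, length-delimited), 26 = reservation_id
      // (field 3), 32 = start_time and 40 = end_time (fields 4/5, varint), and
      // 48 = include_resource_allocations (field 6, varint bool); 0 ends input.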
      private int bitField0_;

      private java.lang.Object queue_ = "";
      /**
       * <code>optional string queue = 1;</code>
       * @return Whether the queue field is set.
       */
      public boolean hasQueue() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>optional string queue = 1;</code>
       * @return The queue.
       */
      public java.lang.String getQueue() {
        java.lang.Object ref = queue_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            queue_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string queue = 1;</code>
       * @return The bytes for queue.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getQueueBytes() {
        java.lang.Object ref = queue_;
        if (ref instanceof java.lang.String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          queue_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string queue = 1;</code>
       * @param value The queue to set.
       * @return This builder for chaining.
       */
      public Builder setQueue(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        queue_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional string queue = 1;</code>
       * @return This builder for chaining.
       */
      public Builder clearQueue() {
        queue_ = getDefaultInstance().getQueue();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
        return this;
      }
      /**
       * <code>optional string queue = 1;</code>
       * @param value The bytes for queue to set.
       * @return This builder for chaining.
       */
      public Builder setQueueBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        queue_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
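      // Note that setQueueBytes stores the raw bytes unchanged; UTF-8 validity
      // is only examined later, when getQueue() decodes them.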

      private java.lang.Object reservationId_ = "";
      /**
       * <code>optional string reservation_id = 3;</code>
       * @return Whether the reservationId field is set.
       */
      public boolean hasReservationId() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * <code>optional string reservation_id = 3;</code>
       * @return The reservationId.
       */
      public java.lang.String getReservationId() {
        java.lang.Object ref = reservationId_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            reservationId_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string reservation_id = 3;</code>
       * @return The bytes for reservationId.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getReservationIdBytes() {
        java.lang.Object ref = reservationId_;
        if (ref instanceof java.lang.String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          reservationId_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string reservation_id = 3;</code>
       * @param value The reservationId to set.
       * @return This builder for chaining.
       */
      public Builder setReservationId(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        reservationId_ = value;
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>optional string reservation_id = 3;</code>
       * @return This builder for chaining.
       */
      public Builder clearReservationId() {
        reservationId_ = getDefaultInstance().getReservationId();
        bitField0_ = (bitField0_ & ~0x00000002);
        onChanged();
        return this;
      }
      /**
       * <code>optional string reservation_id = 3;</code>
       * @param value The bytes for reservationId to set.
       * @return This builder for chaining.
       */
      public Builder setReservationIdBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        reservationId_ = value;
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }

      private long startTime_ ;
      /**
       * <code>optional int64 start_time = 4;</code>
       * @return Whether the startTime field is set.
       */
      @java.lang.Override
      public boolean hasStartTime() {
        return ((bitField0_ & 0x00000004) != 0);
      }
      /**
       * <code>optional int64 start_time = 4;</code>
       * @return The startTime.
       */
      @java.lang.Override
      public long getStartTime() {
        return startTime_;
      }
      /**
       * <code>optional int64 start_time = 4;</code>
       * @param value The startTime to set.
       * @return This builder for chaining.
       */
      public Builder setStartTime(long value) {

        startTime_ = value;
        bitField0_ |= 0x00000004;
        onChanged();
        return this;
      }
      /**
       * <code>optional int64 start_time = 4;</code>
       * @return This builder for chaining.
       */
      public Builder clearStartTime() {
        bitField0_ = (bitField0_ & ~0x00000004);
        startTime_ = 0L;
        onChanged();
        return this;
      }

      private long endTime_ ;
      /**
       * <code>optional int64 end_time = 5;</code>
       * @return Whether the endTime field is set.
       */
      @java.lang.Override
      public boolean hasEndTime() {
        return ((bitField0_ & 0x00000008) != 0);
      }
      /**
       * <code>optional int64 end_time = 5;</code>
       * @return The endTime.
       */
      @java.lang.Override
      public long getEndTime() {
        return endTime_;
      }
      /**
       * <code>optional int64 end_time = 5;</code>
       * @param value The endTime to set.
       * @return This builder for chaining.
       */
      public Builder setEndTime(long value) {

        endTime_ = value;
        bitField0_ |= 0x00000008;
        onChanged();
        return this;
      }
      /**
       * <code>optional int64 end_time = 5;</code>
       * @return This builder for chaining.
       */
      public Builder clearEndTime() {
        bitField0_ = (bitField0_ & ~0x00000008);
        endTime_ = 0L;
        onChanged();
        return this;
      }

      private boolean includeResourceAllocations_ ;
      /**
       * <code>optional bool include_resource_allocations = 6;</code>
       * @return Whether the includeResourceAllocations field is set.
       */
      @java.lang.Override
      public boolean hasIncludeResourceAllocations() {
        return ((bitField0_ & 0x00000010) != 0);
      }
      /**
       * <code>optional bool include_resource_allocations = 6;</code>
       * @return The includeResourceAllocations.
       */
      @java.lang.Override
      public boolean getIncludeResourceAllocations() {
        return includeResourceAllocations_;
      }
      /**
       * <code>optional bool include_resource_allocations = 6;</code>
       * @param value The includeResourceAllocations to set.
       * @return This builder for chaining.
       */
      public Builder setIncludeResourceAllocations(boolean value) {

        includeResourceAllocations_ = value;
        bitField0_ |= 0x00000010;
        onChanged();
        return this;
      }
      /**
       * <code>optional bool include_resource_allocations = 6;</code>
       * @return This builder for chaining.
       */
      public Builder clearIncludeResourceAllocations() {
        bitField0_ = (bitField0_ & ~0x00000010);
        includeResourceAllocations_ = false;
        onChanged();
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.ReservationListRequestProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.ReservationListRequestProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListRequestProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListRequestProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListRequestProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<ReservationListRequestProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<ReservationListRequestProto>() {
      @java.lang.Override
      public ReservationListRequestProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
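    // The public PARSER field is deprecated in favor of the parser() accessor
    // below, which returns the same singleton.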

    public static org.apache.hadoop.thirdparty.protobuf.Parser<ReservationListRequestProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<ReservationListRequestProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListRequestProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
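
  // Illustrative usage sketch (not part of the generated output; queue name and
  // times are placeholder values): build a ReservationListRequestProto,
  // serialize it, and parse it back.
  //
  //   YarnServiceProtos.ReservationListRequestProto request =
  //       YarnServiceProtos.ReservationListRequestProto.newBuilder()
  //           .setQueue("default")                  // optional string, field 1
  //           .setStartTime(0L)                     // optional int64, field 4
  //           .setEndTime(Long.MAX_VALUE)           // optional int64, field 5
  //           .setIncludeResourceAllocations(true)  // optional bool, field 6
  //           .build();
  //   byte[] wire = request.toByteArray();
  //   YarnServiceProtos.ReservationListRequestProto parsed =
  //       YarnServiceProtos.ReservationListRequestProto.parseFrom(wire);
  //   assert parsed.hasQueue() && "default".equals(parsed.getQueue());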

  public interface ReservationListResponseProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.ReservationListResponseProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>repeated .hadoop.yarn.ReservationAllocationStateProto reservations = 1;</code>
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto> 
        getReservationsList();
    /**
     * <code>repeated .hadoop.yarn.ReservationAllocationStateProto reservations = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto getReservations(int index);
    /**
     * <code>repeated .hadoop.yarn.ReservationAllocationStateProto reservations = 1;</code>
     */
    int getReservationsCount();
    /**
     * <code>repeated .hadoop.yarn.ReservationAllocationStateProto reservations = 1;</code>
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProtoOrBuilder> 
        getReservationsOrBuilderList();
    /**
     * <code>repeated .hadoop.yarn.ReservationAllocationStateProto reservations = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProtoOrBuilder getReservationsOrBuilder(
        int index);
  }
  /**
   * Protobuf type {@code hadoop.yarn.ReservationListResponseProto}
   */
  public static final class ReservationListResponseProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.ReservationListResponseProto)
      ReservationListResponseProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use ReservationListResponseProto.newBuilder() to construct.
    private ReservationListResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private ReservationListResponseProto() {
      reservations_ = java.util.Collections.emptyList();
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new ReservationListResponseProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReservationListResponseProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReservationListResponseProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListResponseProto.Builder.class);
    }

    public static final int RESERVATIONS_FIELD_NUMBER = 1;
    @SuppressWarnings("serial")
    private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto> reservations_;
    /**
     * <code>repeated .hadoop.yarn.ReservationAllocationStateProto reservations = 1;</code>
     */
    @java.lang.Override
    public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto> getReservationsList() {
      return reservations_;
    }
    /**
     * <code>repeated .hadoop.yarn.ReservationAllocationStateProto reservations = 1;</code>
     */
    @java.lang.Override
    public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProtoOrBuilder> 
        getReservationsOrBuilderList() {
      return reservations_;
    }
    /**
     * <code>repeated .hadoop.yarn.ReservationAllocationStateProto reservations = 1;</code>
     */
    @java.lang.Override
    public int getReservationsCount() {
      return reservations_.size();
    }
    /**
     * <code>repeated .hadoop.yarn.ReservationAllocationStateProto reservations = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto getReservations(int index) {
      return reservations_.get(index);
    }
    /**
     * <code>repeated .hadoop.yarn.ReservationAllocationStateProto reservations = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProtoOrBuilder getReservationsOrBuilder(
        int index) {
      return reservations_.get(index);
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      for (int i = 0; i < getReservationsCount(); i++) {
        if (!getReservations(i).isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }
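    // Unlike the request message, this check is not trivially true: each nested
    // ReservationAllocationStateProto may carry required fields, so every
    // element is verified before the result is memoized.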

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      for (int i = 0; i < reservations_.size(); i++) {
        output.writeMessage(1, reservations_.get(i));
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      for (int i = 0; i < reservations_.size(); i++) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, reservations_.get(i));
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListResponseProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListResponseProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListResponseProto) obj;

      if (!getReservationsList()
          .equals(other.getReservationsList())) return false;
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (getReservationsCount() > 0) {
        hash = (37 * hash) + RESERVATIONS_FIELD_NUMBER;
        hash = (53 * hash) + getReservationsList().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListResponseProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListResponseProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListResponseProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListResponseProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListResponseProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListResponseProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListResponseProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListResponseProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListResponseProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.ReservationListResponseProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.ReservationListResponseProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListResponseProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReservationListResponseProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReservationListResponseProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListResponseProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListResponseProto.newBuilder()
      private Builder() {

      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);

      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        if (reservationsBuilder_ == null) {
          reservations_ = java.util.Collections.emptyList();
        } else {
          reservations_ = null;
          reservationsBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReservationListResponseProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListResponseProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListResponseProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListResponseProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListResponseProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListResponseProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListResponseProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListResponseProto(this);
        buildPartialRepeatedFields(result);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartialRepeatedFields(org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListResponseProto result) {
        if (reservationsBuilder_ == null) {
          if (((bitField0_ & 0x00000001) != 0)) {
            reservations_ = java.util.Collections.unmodifiableList(reservations_);
            bitField0_ = (bitField0_ & ~0x00000001);
          }
          result.reservations_ = reservations_;
        } else {
          result.reservations_ = reservationsBuilder_.build();
        }
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListResponseProto result) {
        int from_bitField0_ = bitField0_;
      }
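      // This message has no singular fields to stage; the unused local above is
      // an artifact of the generator's template, and the repeated field is
      // handled in buildPartialRepeatedFields instead.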

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListResponseProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListResponseProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListResponseProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListResponseProto.getDefaultInstance()) return this;
        if (reservationsBuilder_ == null) {
          if (!other.reservations_.isEmpty()) {
            if (reservations_.isEmpty()) {
              reservations_ = other.reservations_;
              bitField0_ = (bitField0_ & ~0x00000001);
            } else {
              ensureReservationsIsMutable();
              reservations_.addAll(other.reservations_);
            }
            onChanged();
          }
        } else {
          if (!other.reservations_.isEmpty()) {
            if (reservationsBuilder_.isEmpty()) {
              reservationsBuilder_.dispose();
              reservationsBuilder_ = null;
              reservations_ = other.reservations_;
              bitField0_ = (bitField0_ & ~0x00000001);
              reservationsBuilder_ = 
                org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                   getReservationsFieldBuilder() : null;
            } else {
              reservationsBuilder_.addAllMessages(other.reservations_);
            }
          }
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        for (int i = 0; i < getReservationsCount(); i++) {
          if (!getReservations(i).isInitialized()) {
            return false;
          }
        }
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto m =
                    input.readMessage(
                        org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto.PARSER,
                        extensionRegistry);
                if (reservationsBuilder_ == null) {
                  ensureReservationsIsMutable();
                  reservations_.add(m);
                } else {
                  reservationsBuilder_.addMessage(m);
                }
                break;
              } // case 10
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

      private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto> reservations_ =
        java.util.Collections.emptyList();
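      // Copy-on-write guard: bit 0x00000001 of bitField0_ records whether
      // reservations_ is already a private mutable ArrayList; until it is set,
      // the list may be shared with another message and must be copied before
      // any mutation.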
      private void ensureReservationsIsMutable() {
        if (!((bitField0_ & 0x00000001) != 0)) {
          reservations_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto>(reservations_);
          bitField0_ |= 0x00000001;
        }
      }

      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto, org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProtoOrBuilder> reservationsBuilder_;

      /**
       * <code>repeated .hadoop.yarn.ReservationAllocationStateProto reservations = 1;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto> getReservationsList() {
        if (reservationsBuilder_ == null) {
          return java.util.Collections.unmodifiableList(reservations_);
        } else {
          return reservationsBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ReservationAllocationStateProto reservations = 1;</code>
       */
      public int getReservationsCount() {
        if (reservationsBuilder_ == null) {
          return reservations_.size();
        } else {
          return reservationsBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ReservationAllocationStateProto reservations = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto getReservations(int index) {
        if (reservationsBuilder_ == null) {
          return reservations_.get(index);
        } else {
          return reservationsBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ReservationAllocationStateProto reservations = 1;</code>
       */
      public Builder setReservations(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto value) {
        if (reservationsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureReservationsIsMutable();
          reservations_.set(index, value);
          onChanged();
        } else {
          reservationsBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ReservationAllocationStateProto reservations = 1;</code>
       */
      public Builder setReservations(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto.Builder builderForValue) {
        if (reservationsBuilder_ == null) {
          ensureReservationsIsMutable();
          reservations_.set(index, builderForValue.build());
          onChanged();
        } else {
          reservationsBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ReservationAllocationStateProto reservations = 1;</code>
       */
      public Builder addReservations(org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto value) {
        if (reservationsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureReservationsIsMutable();
          reservations_.add(value);
          onChanged();
        } else {
          reservationsBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ReservationAllocationStateProto reservations = 1;</code>
       */
      public Builder addReservations(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto value) {
        if (reservationsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureReservationsIsMutable();
          reservations_.add(index, value);
          onChanged();
        } else {
          reservationsBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ReservationAllocationStateProto reservations = 1;</code>
       */
      public Builder addReservations(
          org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto.Builder builderForValue) {
        if (reservationsBuilder_ == null) {
          ensureReservationsIsMutable();
          reservations_.add(builderForValue.build());
          onChanged();
        } else {
          reservationsBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ReservationAllocationStateProto reservations = 1;</code>
       */
      public Builder addReservations(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto.Builder builderForValue) {
        if (reservationsBuilder_ == null) {
          ensureReservationsIsMutable();
          reservations_.add(index, builderForValue.build());
          onChanged();
        } else {
          reservationsBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ReservationAllocationStateProto reservations = 1;</code>
       */
      public Builder addAllReservations(
          java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto> values) {
        if (reservationsBuilder_ == null) {
          ensureReservationsIsMutable();
          org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
              values, reservations_);
          onChanged();
        } else {
          reservationsBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ReservationAllocationStateProto reservations = 1;</code>
       */
      public Builder clearReservations() {
        if (reservationsBuilder_ == null) {
          reservations_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000001);
          onChanged();
        } else {
          reservationsBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ReservationAllocationStateProto reservations = 1;</code>
       */
      public Builder removeReservations(int index) {
        if (reservationsBuilder_ == null) {
          ensureReservationsIsMutable();
          reservations_.remove(index);
          onChanged();
        } else {
          reservationsBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ReservationAllocationStateProto reservations = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto.Builder getReservationsBuilder(
          int index) {
        return getReservationsFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .hadoop.yarn.ReservationAllocationStateProto reservations = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProtoOrBuilder getReservationsOrBuilder(
          int index) {
        if (reservationsBuilder_ == null) {
          return reservations_.get(index);
        } else {
          return reservationsBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ReservationAllocationStateProto reservations = 1;</code>
       */
      public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProtoOrBuilder> 
           getReservationsOrBuilderList() {
        if (reservationsBuilder_ != null) {
          return reservationsBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(reservations_);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ReservationAllocationStateProto reservations = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto.Builder addReservationsBuilder() {
        return getReservationsFieldBuilder().addBuilder(
            org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.ReservationAllocationStateProto reservations = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto.Builder addReservationsBuilder(
          int index) {
        return getReservationsFieldBuilder().addBuilder(
            index, org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.ReservationAllocationStateProto reservations = 1;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto.Builder> 
           getReservationsBuilderList() {
        return getReservationsFieldBuilder().getBuilderList();
      }
      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto, org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProtoOrBuilder> 
          getReservationsFieldBuilder() {
        if (reservationsBuilder_ == null) {
          reservationsBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto, org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProtoOrBuilder>(
                  reservations_,
                  ((bitField0_ & 0x00000001) != 0),
                  getParentForChildren(),
                  isClean());
          reservations_ = null;
        }
        return reservationsBuilder_;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.ReservationListResponseProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.ReservationListResponseProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListResponseProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListResponseProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListResponseProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<ReservationListResponseProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<ReservationListResponseProto>() {
      @java.lang.Override
      public ReservationListResponseProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
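    // Direct use of the PARSER constant is deprecated; parser() below is the
    // supported accessor and simply returns this same instance.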

    public static org.apache.hadoop.thirdparty.protobuf.Parser<ReservationListResponseProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<ReservationListResponseProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListResponseProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
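  // Usage sketch (not part of the generated API): one way a hypothetical
  // caller might assemble and read back a ReservationListResponseProto. The
  // default-instance reservation below is only a placeholder; a real response
  // would carry ReservationAllocationStateProto entries populated by the
  // ResourceManager's reservation system.
  //
  //   org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListResponseProto response =
  //       org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListResponseProto.newBuilder()
  //           .addReservations(
  //               org.apache.hadoop.yarn.proto.YarnProtos
  //                   .ReservationAllocationStateProto.getDefaultInstance())
  //           .build();
  //   for (org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto r
  //       : response.getReservationsList()) {
  //     // inspect each reservation allocation state
  //   }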

  public interface RunSharedCacheCleanerTaskRequestProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.RunSharedCacheCleanerTaskRequestProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
  }
  /**
   * Protobuf type {@code hadoop.yarn.RunSharedCacheCleanerTaskRequestProto}
   */
  public static final class RunSharedCacheCleanerTaskRequestProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.RunSharedCacheCleanerTaskRequestProto)
      RunSharedCacheCleanerTaskRequestProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use RunSharedCacheCleanerTaskRequestProto.newBuilder() to construct.
    private RunSharedCacheCleanerTaskRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private RunSharedCacheCleanerTaskRequestProto() {
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new RunSharedCacheCleanerTaskRequestProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_RunSharedCacheCleanerTaskRequestProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_RunSharedCacheCleanerTaskRequestProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskRequestProto.Builder.class);
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskRequestProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskRequestProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskRequestProto) obj;

      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskRequestProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskRequestProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskRequestProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskRequestProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskRequestProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskRequestProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskRequestProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskRequestProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskRequestProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.RunSharedCacheCleanerTaskRequestProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.RunSharedCacheCleanerTaskRequestProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskRequestProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_RunSharedCacheCleanerTaskRequestProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_RunSharedCacheCleanerTaskRequestProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskRequestProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskRequestProto.newBuilder()
      private Builder() {
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_RunSharedCacheCleanerTaskRequestProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskRequestProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskRequestProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskRequestProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskRequestProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskRequestProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskRequestProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskRequestProto(this);
        onBuilt();
        return result;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskRequestProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskRequestProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskRequestProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskRequestProto.getDefaultInstance()) return this;
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.RunSharedCacheCleanerTaskRequestProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.RunSharedCacheCleanerTaskRequestProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskRequestProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskRequestProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskRequestProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<RunSharedCacheCleanerTaskRequestProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<RunSharedCacheCleanerTaskRequestProto>() {
      @java.lang.Override
      public RunSharedCacheCleanerTaskRequestProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<RunSharedCacheCleanerTaskRequestProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<RunSharedCacheCleanerTaskRequestProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskRequestProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
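  // Usage sketch (not part of the generated API): RunSharedCacheCleanerTaskRequestProto
  // declares no fields, so the default instance is already a complete request
  // and a wire round trip carries only the (empty) unknown-field set.
  //
  //   byte[] bytes = org.apache.hadoop.yarn.proto.YarnServiceProtos
  //       .RunSharedCacheCleanerTaskRequestProto.getDefaultInstance().toByteArray();
  //   org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskRequestProto parsed =
  //       org.apache.hadoop.yarn.proto.YarnServiceProtos
  //           .RunSharedCacheCleanerTaskRequestProto.parseFrom(bytes);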

  public interface RunSharedCacheCleanerTaskResponseProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.RunSharedCacheCleanerTaskResponseProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>optional bool accepted = 1;</code>
     * @return Whether the accepted field is set.
     */
    boolean hasAccepted();
    /**
     * <code>optional bool accepted = 1;</code>
     * @return The accepted.
     */
    boolean getAccepted();
  }
  /**
   * Protobuf type {@code hadoop.yarn.RunSharedCacheCleanerTaskResponseProto}
   */
  public static final class RunSharedCacheCleanerTaskResponseProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.RunSharedCacheCleanerTaskResponseProto)
      RunSharedCacheCleanerTaskResponseProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use RunSharedCacheCleanerTaskResponseProto.newBuilder() to construct.
    private RunSharedCacheCleanerTaskResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private RunSharedCacheCleanerTaskResponseProto() {
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new RunSharedCacheCleanerTaskResponseProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_RunSharedCacheCleanerTaskResponseProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_RunSharedCacheCleanerTaskResponseProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskResponseProto.Builder.class);
    }

    private int bitField0_;
    public static final int ACCEPTED_FIELD_NUMBER = 1;
    private boolean accepted_ = false;
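    // Presence for `accepted` is tracked in bit 0x00000001 of bitField0_,
    // not in the field value itself, so an explicit false is distinguishable
    // from an unset field.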
    /**
     * <code>optional bool accepted = 1;</code>
     * @return Whether the accepted field is set.
     */
    @java.lang.Override
    public boolean hasAccepted() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional bool accepted = 1;</code>
     * @return The accepted.
     */
    @java.lang.Override
    public boolean getAccepted() {
      return accepted_;
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeBool(1, accepted_);
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeBoolSize(1, accepted_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskResponseProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskResponseProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskResponseProto) obj;

      if (hasAccepted() != other.hasAccepted()) return false;
      if (hasAccepted()) {
        if (getAccepted()
            != other.getAccepted()) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasAccepted()) {
        hash = (37 * hash) + ACCEPTED_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashBoolean(
            getAccepted());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskResponseProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskResponseProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskResponseProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskResponseProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskResponseProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskResponseProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskResponseProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskResponseProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskResponseProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.RunSharedCacheCleanerTaskResponseProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.RunSharedCacheCleanerTaskResponseProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskResponseProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_RunSharedCacheCleanerTaskResponseProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_RunSharedCacheCleanerTaskResponseProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskResponseProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskResponseProto.newBuilder()
      private Builder() {
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        accepted_ = false;
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_RunSharedCacheCleanerTaskResponseProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskResponseProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskResponseProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskResponseProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskResponseProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskResponseProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskResponseProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskResponseProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskResponseProto result) {
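        // Copy only the fields whose presence bits are set in the builder into
        // the freshly constructed message, then OR those bits into the
        // message's own bitField0_.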
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.accepted_ = accepted_;
          to_bitField0_ |= 0x00000001;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskResponseProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskResponseProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskResponseProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskResponseProto.getDefaultInstance()) return this;
        if (other.hasAccepted()) {
          setAccepted(other.getAccepted());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 8: {
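                // tag 8 = (field 1 << 3) | wire type 0: the varint-encoded
                // `accepted` bool follows on the wire.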
                accepted_ = input.readBool();
                bitField0_ |= 0x00000001;
                break;
              } // case 8
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

      private boolean accepted_;
      /**
       * <code>optional bool accepted = 1;</code>
       * @return Whether the accepted field is set.
       */
      @java.lang.Override
      public boolean hasAccepted() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>optional bool accepted = 1;</code>
       * @return The accepted.
       */
      @java.lang.Override
      public boolean getAccepted() {
        return accepted_;
      }
      /**
       * <code>optional bool accepted = 1;</code>
       * @param value The accepted to set.
       * @return This builder for chaining.
       */
      public Builder setAccepted(boolean value) {
        accepted_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional bool accepted = 1;</code>
       * @return This builder for chaining.
       */
      public Builder clearAccepted() {
        bitField0_ = (bitField0_ & ~0x00000001);
        accepted_ = false;
        onChanged();
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.RunSharedCacheCleanerTaskResponseProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.RunSharedCacheCleanerTaskResponseProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskResponseProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskResponseProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskResponseProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<RunSharedCacheCleanerTaskResponseProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<RunSharedCacheCleanerTaskResponseProto>() {
      @java.lang.Override
      public RunSharedCacheCleanerTaskResponseProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<RunSharedCacheCleanerTaskResponseProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<RunSharedCacheCleanerTaskResponseProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskResponseProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
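  // Usage sketch (not part of the generated API): because `accepted` is an
  // optional field with an explicit presence bit, hasAccepted() lets a
  // hypothetical caller tell an explicit `false` apart from a field that was
  // never set.
  //
  //   org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskResponseProto response =
  //       org.apache.hadoop.yarn.proto.YarnServiceProtos
  //           .RunSharedCacheCleanerTaskResponseProto.newBuilder()
  //           .setAccepted(false)
  //           .build();
  //   boolean wasSet = response.hasAccepted();   // true: explicitly set
  //   boolean value  = response.getAccepted();   // false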

  public interface GetLocalizationStatusesRequestProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.GetLocalizationStatusesRequestProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>repeated .hadoop.yarn.ContainerIdProto container_id = 1;</code>
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto> 
        getContainerIdList();
    /**
     * <code>repeated .hadoop.yarn.ContainerIdProto container_id = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getContainerId(int index);
    /**
     * <code>repeated .hadoop.yarn.ContainerIdProto container_id = 1;</code>
     */
    int getContainerIdCount();
    /**
     * <code>repeated .hadoop.yarn.ContainerIdProto container_id = 1;</code>
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> 
        getContainerIdOrBuilderList();
    /**
     * <code>repeated .hadoop.yarn.ContainerIdProto container_id = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getContainerIdOrBuilder(
        int index);
  }
  /**
   * <pre>
   * Localization
   * </pre>
   *
   * Protobuf type {@code hadoop.yarn.GetLocalizationStatusesRequestProto}
   */
  public static final class GetLocalizationStatusesRequestProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.GetLocalizationStatusesRequestProto)
      GetLocalizationStatusesRequestProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use GetLocalizationStatusesRequestProto.newBuilder() to construct.
    private GetLocalizationStatusesRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private GetLocalizationStatusesRequestProto() {
      containerId_ = java.util.Collections.emptyList();
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new GetLocalizationStatusesRequestProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetLocalizationStatusesRequestProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetLocalizationStatusesRequestProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesRequestProto.Builder.class);
    }

    public static final int CONTAINER_ID_FIELD_NUMBER = 1;
    @SuppressWarnings("serial")
    private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto> containerId_;
    /**
     * <code>repeated .hadoop.yarn.ContainerIdProto container_id = 1;</code>
     */
    @java.lang.Override
    public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto> getContainerIdList() {
      return containerId_;
    }
    /**
     * <code>repeated .hadoop.yarn.ContainerIdProto container_id = 1;</code>
     */
    @java.lang.Override
    public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> 
        getContainerIdOrBuilderList() {
      return containerId_;
    }
    /**
     * <code>repeated .hadoop.yarn.ContainerIdProto container_id = 1;</code>
     */
    @java.lang.Override
    public int getContainerIdCount() {
      return containerId_.size();
    }
    /**
     * <code>repeated .hadoop.yarn.ContainerIdProto container_id = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getContainerId(int index) {
      return containerId_.get(index);
    }
    /**
     * <code>repeated .hadoop.yarn.ContainerIdProto container_id = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getContainerIdOrBuilder(
        int index) {
      return containerId_.get(index);
    }
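
    // Usage sketch (illustrative; `request` is an assumed parsed instance of
    // this message, not part of the generated API surface):
    //
    //   for (org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto id
    //       : request.getContainerIdList()) {
    //     System.out.println(id);  // indexed access: getContainerId(i)
    //   }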

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      for (int i = 0; i < containerId_.size(); i++) {
        output.writeMessage(1, containerId_.get(i));
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      for (int i = 0; i < containerId_.size(); i++) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, containerId_.get(i));
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }
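
    // Size accounting note: computeMessageSize(1, element) adds a one-byte
    // tag for field 1 (wire type 2), a varint length prefix, and the
    // element's own serialized size; the total is memoized in memoizedSize.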

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesRequestProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesRequestProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesRequestProto) obj;

      if (!getContainerIdList()
          .equals(other.getContainerIdList())) return false;
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (getContainerIdCount() > 0) {
        hash = (37 * hash) + CONTAINER_ID_FIELD_NUMBER;
        hash = (53 * hash) + getContainerIdList().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesRequestProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesRequestProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesRequestProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesRequestProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesRequestProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesRequestProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesRequestProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesRequestProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }
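
    // Parse sketch (illustrative; `wire` is assumed to hold bytes produced by
    // toByteArray() on an instance of this message):
    //
    //   GetLocalizationStatusesRequestProto parsed =
    //       GetLocalizationStatusesRequestProto.parseFrom(wire);
    //
    // The InputStream overloads declare IOException; the byte[]/ByteString
    // overloads throw only InvalidProtocolBufferException.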

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesRequestProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * <pre>
     * Localization
     * </pre>
     *
     * Protobuf type {@code hadoop.yarn.GetLocalizationStatusesRequestProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.GetLocalizationStatusesRequestProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesRequestProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetLocalizationStatusesRequestProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetLocalizationStatusesRequestProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesRequestProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesRequestProto.newBuilder()
      private Builder() {

      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);

      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        if (containerIdBuilder_ == null) {
          containerId_ = java.util.Collections.emptyList();
        } else {
          containerId_ = null;
          containerIdBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetLocalizationStatusesRequestProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesRequestProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesRequestProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesRequestProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesRequestProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesRequestProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesRequestProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesRequestProto(this);
        buildPartialRepeatedFields(result);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartialRepeatedFields(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesRequestProto result) {
        if (containerIdBuilder_ == null) {
          if (((bitField0_ & 0x00000001) != 0)) {
            containerId_ = java.util.Collections.unmodifiableList(containerId_);
            bitField0_ = (bitField0_ & ~0x00000001);
          }
          result.containerId_ = containerId_;
        } else {
          result.containerId_ = containerIdBuilder_.build();
        }
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesRequestProto result) {
        int from_bitField0_ = bitField0_;
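        // Generated no-op: this message has no singular fields, so nothing is
        // copied from bitField0_ into the result.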
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesRequestProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesRequestProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesRequestProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesRequestProto.getDefaultInstance()) return this;
        if (containerIdBuilder_ == null) {
          if (!other.containerId_.isEmpty()) {
            if (containerId_.isEmpty()) {
              containerId_ = other.containerId_;
              bitField0_ = (bitField0_ & ~0x00000001);
            } else {
              ensureContainerIdIsMutable();
              containerId_.addAll(other.containerId_);
            }
            onChanged();
          }
        } else {
          if (!other.containerId_.isEmpty()) {
            if (containerIdBuilder_.isEmpty()) {
              containerIdBuilder_.dispose();
              containerIdBuilder_ = null;
              containerId_ = other.containerId_;
              bitField0_ = (bitField0_ & ~0x00000001);
              containerIdBuilder_ = 
                org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                   getContainerIdFieldBuilder() : null;
            } else {
              containerIdBuilder_.addAllMessages(other.containerId_);
            }
          }
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto m =
                    input.readMessage(
                        org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.PARSER,
                        extensionRegistry);
                if (containerIdBuilder_ == null) {
                  ensureContainerIdIsMutable();
                  containerId_.add(m);
                } else {
                  containerIdBuilder_.addMessage(m);
                }
                break;
              } // case 10
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
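      // Wire-format note: tag = (field_number << 3) | wire_type, so case 10
      // is container_id (field 1, length-delimited) and tag 0 marks end of
      // input; unrecognized tags are routed to parseUnknownField.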
      private int bitField0_;

      private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto> containerId_ =
        java.util.Collections.emptyList();
      private void ensureContainerIdIsMutable() {
        if (!((bitField0_ & 0x00000001) != 0)) {
          containerId_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto>(containerId_);
          bitField0_ |= 0x00000001;
        }
      }

      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> containerIdBuilder_;

      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto> getContainerIdList() {
        if (containerIdBuilder_ == null) {
          return java.util.Collections.unmodifiableList(containerId_);
        } else {
          return containerIdBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public int getContainerIdCount() {
        if (containerIdBuilder_ == null) {
          return containerId_.size();
        } else {
          return containerIdBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getContainerId(int index) {
        if (containerIdBuilder_ == null) {
          return containerId_.get(index);
        } else {
          return containerIdBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public Builder setContainerId(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) {
        if (containerIdBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureContainerIdIsMutable();
          containerId_.set(index, value);
          onChanged();
        } else {
          containerIdBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public Builder setContainerId(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder builderForValue) {
        if (containerIdBuilder_ == null) {
          ensureContainerIdIsMutable();
          containerId_.set(index, builderForValue.build());
          onChanged();
        } else {
          containerIdBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public Builder addContainerId(org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) {
        if (containerIdBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureContainerIdIsMutable();
          containerId_.add(value);
          onChanged();
        } else {
          containerIdBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public Builder addContainerId(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) {
        if (containerIdBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureContainerIdIsMutable();
          containerId_.add(index, value);
          onChanged();
        } else {
          containerIdBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public Builder addContainerId(
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder builderForValue) {
        if (containerIdBuilder_ == null) {
          ensureContainerIdIsMutable();
          containerId_.add(builderForValue.build());
          onChanged();
        } else {
          containerIdBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public Builder addContainerId(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder builderForValue) {
        if (containerIdBuilder_ == null) {
          ensureContainerIdIsMutable();
          containerId_.add(index, builderForValue.build());
          onChanged();
        } else {
          containerIdBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public Builder addAllContainerId(
          java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto> values) {
        if (containerIdBuilder_ == null) {
          ensureContainerIdIsMutable();
          org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
              values, containerId_);
          onChanged();
        } else {
          containerIdBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public Builder clearContainerId() {
        if (containerIdBuilder_ == null) {
          containerId_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000001);
          onChanged();
        } else {
          containerIdBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public Builder removeContainerId(int index) {
        if (containerIdBuilder_ == null) {
          ensureContainerIdIsMutable();
          containerId_.remove(index);
          onChanged();
        } else {
          containerIdBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder getContainerIdBuilder(
          int index) {
        return getContainerIdFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getContainerIdOrBuilder(
          int index) {
        if (containerIdBuilder_ == null) {
          return containerId_.get(index);
        } else {
          return containerIdBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> 
           getContainerIdOrBuilderList() {
        if (containerIdBuilder_ != null) {
          return containerIdBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(containerId_);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder addContainerIdBuilder() {
        return getContainerIdFieldBuilder().addBuilder(
            org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder addContainerIdBuilder(
          int index) {
        return getContainerIdFieldBuilder().addBuilder(
            index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder> 
           getContainerIdBuilderList() {
        return getContainerIdFieldBuilder().getBuilderList();
      }
      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> 
          getContainerIdFieldBuilder() {
        if (containerIdBuilder_ == null) {
          containerIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder>(
                  containerId_,
                  ((bitField0_ & 0x00000001) != 0),
                  getParentForChildren(),
                  isClean());
          containerId_ = null;
        }
        return containerIdBuilder_;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.GetLocalizationStatusesRequestProto)
    }
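
    // Builder sketch (illustrative; `containerId` is an assumed
    // YarnProtos.ContainerIdProto built elsewhere):
    //
    //   GetLocalizationStatusesRequestProto request =
    //       GetLocalizationStatusesRequestProto.newBuilder()
    //           .addContainerId(containerId)
    //           .build();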

    // @@protoc_insertion_point(class_scope:hadoop.yarn.GetLocalizationStatusesRequestProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesRequestProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesRequestProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesRequestProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<GetLocalizationStatusesRequestProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<GetLocalizationStatusesRequestProto>() {
      @java.lang.Override
      public GetLocalizationStatusesRequestProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<GetLocalizationStatusesRequestProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<GetLocalizationStatusesRequestProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesRequestProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
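
  // Round-trip sketch for the message above (illustrative; `request` as in
  // the builder sketch):
  //
  //   byte[] wire = request.toByteArray();
  //   GetLocalizationStatusesRequestProto copy =
  //       GetLocalizationStatusesRequestProto.parseFrom(wire);
  //   assert copy.equals(request) && copy.hashCode() == request.hashCode();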

  public interface GetLocalizationStatusesResponseProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.GetLocalizationStatusesResponseProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>repeated .hadoop.yarn.ContainerLocalizationStatusesProto cntn_localization_statuses = 1;</code>
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto> 
        getCntnLocalizationStatusesList();
    /**
     * <code>repeated .hadoop.yarn.ContainerLocalizationStatusesProto cntn_localization_statuses = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto getCntnLocalizationStatuses(int index);
    /**
     * <code>repeated .hadoop.yarn.ContainerLocalizationStatusesProto cntn_localization_statuses = 1;</code>
     */
    int getCntnLocalizationStatusesCount();
    /**
     * <code>repeated .hadoop.yarn.ContainerLocalizationStatusesProto cntn_localization_statuses = 1;</code>
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProtoOrBuilder> 
        getCntnLocalizationStatusesOrBuilderList();
    /**
     * <code>repeated .hadoop.yarn.ContainerLocalizationStatusesProto cntn_localization_statuses = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProtoOrBuilder getCntnLocalizationStatusesOrBuilder(
        int index);

    /**
     * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto> 
        getFailedRequestsList();
    /**
     * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
     */
    org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto getFailedRequests(int index);
    /**
     * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
     */
    int getFailedRequestsCount();
    /**
     * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProtoOrBuilder> 
        getFailedRequestsOrBuilderList();
    /**
     * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
     */
    org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProtoOrBuilder getFailedRequestsOrBuilder(
        int index);
  }
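
  // Read-side sketch for the interface above (illustrative; `response` is an
  // assumed parsed instance):
  //
  //   response.getCntnLocalizationStatusesList().forEach(System.out::println);
  //   if (response.getFailedRequestsCount() > 0) {
  //     System.out.println(response.getFailedRequests(0));
  //   }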
  /**
   * Protobuf type {@code hadoop.yarn.GetLocalizationStatusesResponseProto}
   */
  public static final class GetLocalizationStatusesResponseProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.GetLocalizationStatusesResponseProto)
      GetLocalizationStatusesResponseProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use GetLocalizationStatusesResponseProto.newBuilder() to construct.
    private GetLocalizationStatusesResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private GetLocalizationStatusesResponseProto() {
      cntnLocalizationStatuses_ = java.util.Collections.emptyList();
      failedRequests_ = java.util.Collections.emptyList();
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new GetLocalizationStatusesResponseProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetLocalizationStatusesResponseProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetLocalizationStatusesResponseProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesResponseProto.Builder.class);
    }

    public static final int CNTN_LOCALIZATION_STATUSES_FIELD_NUMBER = 1;
    @SuppressWarnings("serial")
    private java.util.List<org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto> cntnLocalizationStatuses_;
    /**
     * <code>repeated .hadoop.yarn.ContainerLocalizationStatusesProto cntn_localization_statuses = 1;</code>
     */
    @java.lang.Override
    public java.util.List<org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto> getCntnLocalizationStatusesList() {
      return cntnLocalizationStatuses_;
    }
    /**
     * <code>repeated .hadoop.yarn.ContainerLocalizationStatusesProto cntn_localization_statuses = 1;</code>
     */
    @java.lang.Override
    public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProtoOrBuilder> 
        getCntnLocalizationStatusesOrBuilderList() {
      return cntnLocalizationStatuses_;
    }
    /**
     * <code>repeated .hadoop.yarn.ContainerLocalizationStatusesProto cntn_localization_statuses = 1;</code>
     */
    @java.lang.Override
    public int getCntnLocalizationStatusesCount() {
      return cntnLocalizationStatuses_.size();
    }
    /**
     * <code>repeated .hadoop.yarn.ContainerLocalizationStatusesProto cntn_localization_statuses = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto getCntnLocalizationStatuses(int index) {
      return cntnLocalizationStatuses_.get(index);
    }
    /**
     * <code>repeated .hadoop.yarn.ContainerLocalizationStatusesProto cntn_localization_statuses = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProtoOrBuilder getCntnLocalizationStatusesOrBuilder(
        int index) {
      return cntnLocalizationStatuses_.get(index);
    }

    public static final int FAILED_REQUESTS_FIELD_NUMBER = 2;
    @SuppressWarnings("serial")
    private java.util.List<org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto> failedRequests_;
    /**
     * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
     */
    @java.lang.Override
    public java.util.List<org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto> getFailedRequestsList() {
      return failedRequests_;
    }
    /**
     * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
     */
    @java.lang.Override
    public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProtoOrBuilder> 
        getFailedRequestsOrBuilderList() {
      return failedRequests_;
    }
    /**
     * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
     */
    @java.lang.Override
    public int getFailedRequestsCount() {
      return failedRequests_.size();
    }
    /**
     * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto getFailedRequests(int index) {
      return failedRequests_.get(index);
    }
    /**
     * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProtoOrBuilder getFailedRequestsOrBuilder(
        int index) {
      return failedRequests_.get(index);
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      for (int i = 0; i < cntnLocalizationStatuses_.size(); i++) {
        output.writeMessage(1, cntnLocalizationStatuses_.get(i));
      }
      for (int i = 0; i < failedRequests_.size(); i++) {
        output.writeMessage(2, failedRequests_.get(i));
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      for (int i = 0; i < cntnLocalizationStatuses_.size(); i++) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, cntnLocalizationStatuses_.get(i));
      }
      for (int i = 0; i < failedRequests_.size(); i++) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(2, failedRequests_.get(i));
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesResponseProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesResponseProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesResponseProto) obj;

      if (!getCntnLocalizationStatusesList()
          .equals(other.getCntnLocalizationStatusesList())) return false;
      if (!getFailedRequestsList()
          .equals(other.getFailedRequestsList())) return false;
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (getCntnLocalizationStatusesCount() > 0) {
        hash = (37 * hash) + CNTN_LOCALIZATION_STATUSES_FIELD_NUMBER;
        hash = (53 * hash) + getCntnLocalizationStatusesList().hashCode();
      }
      if (getFailedRequestsCount() > 0) {
        hash = (37 * hash) + FAILED_REQUESTS_FIELD_NUMBER;
        hash = (53 * hash) + getFailedRequestsList().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesResponseProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesResponseProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesResponseProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesResponseProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesResponseProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesResponseProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesResponseProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesResponseProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesResponseProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesResponseProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.GetLocalizationStatusesResponseProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.GetLocalizationStatusesResponseProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesResponseProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetLocalizationStatusesResponseProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetLocalizationStatusesResponseProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesResponseProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesResponseProto.newBuilder()
      private Builder() {

      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);

      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        if (cntnLocalizationStatusesBuilder_ == null) {
          cntnLocalizationStatuses_ = java.util.Collections.emptyList();
        } else {
          cntnLocalizationStatuses_ = null;
          cntnLocalizationStatusesBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        if (failedRequestsBuilder_ == null) {
          failedRequests_ = java.util.Collections.emptyList();
        } else {
          failedRequests_ = null;
          failedRequestsBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000002);
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetLocalizationStatusesResponseProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesResponseProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesResponseProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesResponseProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesResponseProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesResponseProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesResponseProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesResponseProto(this);
        buildPartialRepeatedFields(result);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartialRepeatedFields(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesResponseProto result) {
        if (cntnLocalizationStatusesBuilder_ == null) {
          if (((bitField0_ & 0x00000001) != 0)) {
            cntnLocalizationStatuses_ = java.util.Collections.unmodifiableList(cntnLocalizationStatuses_);
            bitField0_ = (bitField0_ & ~0x00000001);
          }
          result.cntnLocalizationStatuses_ = cntnLocalizationStatuses_;
        } else {
          result.cntnLocalizationStatuses_ = cntnLocalizationStatusesBuilder_.build();
        }
        if (failedRequestsBuilder_ == null) {
          if (((bitField0_ & 0x00000002) != 0)) {
            failedRequests_ = java.util.Collections.unmodifiableList(failedRequests_);
            bitField0_ = (bitField0_ & ~0x00000002);
          }
          result.failedRequests_ = failedRequests_;
        } else {
          result.failedRequests_ = failedRequestsBuilder_.build();
        }
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesResponseProto result) {
        int from_bitField0_ = bitField0_;
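        // Generated no-op: both fields are repeated and finalized in
        // buildPartialRepeatedFields, so bitField0_ is not consulted here.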
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesResponseProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesResponseProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesResponseProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesResponseProto.getDefaultInstance()) return this;
        if (cntnLocalizationStatusesBuilder_ == null) {
          if (!other.cntnLocalizationStatuses_.isEmpty()) {
            if (cntnLocalizationStatuses_.isEmpty()) {
              cntnLocalizationStatuses_ = other.cntnLocalizationStatuses_;
              bitField0_ = (bitField0_ & ~0x00000001);
            } else {
              ensureCntnLocalizationStatusesIsMutable();
              cntnLocalizationStatuses_.addAll(other.cntnLocalizationStatuses_);
            }
            onChanged();
          }
        } else {
          if (!other.cntnLocalizationStatuses_.isEmpty()) {
            if (cntnLocalizationStatusesBuilder_.isEmpty()) {
              cntnLocalizationStatusesBuilder_.dispose();
              cntnLocalizationStatusesBuilder_ = null;
              cntnLocalizationStatuses_ = other.cntnLocalizationStatuses_;
              bitField0_ = (bitField0_ & ~0x00000001);
              cntnLocalizationStatusesBuilder_ = 
                org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                   getCntnLocalizationStatusesFieldBuilder() : null;
            } else {
              cntnLocalizationStatusesBuilder_.addAllMessages(other.cntnLocalizationStatuses_);
            }
          }
        }
        if (failedRequestsBuilder_ == null) {
          if (!other.failedRequests_.isEmpty()) {
            if (failedRequests_.isEmpty()) {
              failedRequests_ = other.failedRequests_;
              bitField0_ = (bitField0_ & ~0x00000002);
            } else {
              ensureFailedRequestsIsMutable();
              failedRequests_.addAll(other.failedRequests_);
            }
            onChanged();
          }
        } else {
          if (!other.failedRequests_.isEmpty()) {
            if (failedRequestsBuilder_.isEmpty()) {
              failedRequestsBuilder_.dispose();
              failedRequestsBuilder_ = null;
              failedRequests_ = other.failedRequests_;
              bitField0_ = (bitField0_ & ~0x00000002);
              failedRequestsBuilder_ = 
                org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                   getFailedRequestsFieldBuilder() : null;
            } else {
              failedRequestsBuilder_.addAllMessages(other.failedRequests_);
            }
          }
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto m =
                    input.readMessage(
                        org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto.PARSER,
                        extensionRegistry);
                if (cntnLocalizationStatusesBuilder_ == null) {
                  ensureCntnLocalizationStatusesIsMutable();
                  cntnLocalizationStatuses_.add(m);
                } else {
                  cntnLocalizationStatusesBuilder_.addMessage(m);
                }
                break;
              } // case 10
              case 18: {
                org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto m =
                    input.readMessage(
                        org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.PARSER,
                        extensionRegistry);
                if (failedRequestsBuilder_ == null) {
                  ensureFailedRequestsIsMutable();
                  failedRequests_.add(m);
                } else {
                  failedRequestsBuilder_.addMessage(m);
                }
                break;
              } // case 18
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
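      // Wire-format note: tags follow (field_number << 3) | wire_type, so
      // case 10 is cntn_localization_statuses (field 1) and case 18 is
      // failed_requests (field 2), both length-delimited (wire type 2).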
      private int bitField0_;

      private java.util.List<org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto> cntnLocalizationStatuses_ =
        java.util.Collections.emptyList();
      private void ensureCntnLocalizationStatusesIsMutable() {
        if (!((bitField0_ & 0x00000001) != 0)) {
          cntnLocalizationStatuses_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto>(cntnLocalizationStatuses_);
          bitField0_ |= 0x00000001;
        }
      }

      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto.Builder, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProtoOrBuilder> cntnLocalizationStatusesBuilder_;

      /**
       * <code>repeated .hadoop.yarn.ContainerLocalizationStatusesProto cntn_localization_statuses = 1;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto> getCntnLocalizationStatusesList() {
        if (cntnLocalizationStatusesBuilder_ == null) {
          return java.util.Collections.unmodifiableList(cntnLocalizationStatuses_);
        } else {
          return cntnLocalizationStatusesBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerLocalizationStatusesProto cntn_localization_statuses = 1;</code>
       */
      public int getCntnLocalizationStatusesCount() {
        if (cntnLocalizationStatusesBuilder_ == null) {
          return cntnLocalizationStatuses_.size();
        } else {
          return cntnLocalizationStatusesBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerLocalizationStatusesProto cntn_localization_statuses = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto getCntnLocalizationStatuses(int index) {
        if (cntnLocalizationStatusesBuilder_ == null) {
          return cntnLocalizationStatuses_.get(index);
        } else {
          return cntnLocalizationStatusesBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerLocalizationStatusesProto cntn_localization_statuses = 1;</code>
       */
      public Builder setCntnLocalizationStatuses(
          int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto value) {
        if (cntnLocalizationStatusesBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureCntnLocalizationStatusesIsMutable();
          cntnLocalizationStatuses_.set(index, value);
          onChanged();
        } else {
          cntnLocalizationStatusesBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerLocalizationStatusesProto cntn_localization_statuses = 1;</code>
       */
      public Builder setCntnLocalizationStatuses(
          int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto.Builder builderForValue) {
        if (cntnLocalizationStatusesBuilder_ == null) {
          ensureCntnLocalizationStatusesIsMutable();
          cntnLocalizationStatuses_.set(index, builderForValue.build());
          onChanged();
        } else {
          cntnLocalizationStatusesBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerLocalizationStatusesProto cntn_localization_statuses = 1;</code>
       */
      public Builder addCntnLocalizationStatuses(org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto value) {
        if (cntnLocalizationStatusesBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureCntnLocalizationStatusesIsMutable();
          cntnLocalizationStatuses_.add(value);
          onChanged();
        } else {
          cntnLocalizationStatusesBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerLocalizationStatusesProto cntn_localization_statuses = 1;</code>
       */
      public Builder addCntnLocalizationStatuses(
          int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto value) {
        if (cntnLocalizationStatusesBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureCntnLocalizationStatusesIsMutable();
          cntnLocalizationStatuses_.add(index, value);
          onChanged();
        } else {
          cntnLocalizationStatusesBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerLocalizationStatusesProto cntn_localization_statuses = 1;</code>
       */
      public Builder addCntnLocalizationStatuses(
          org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto.Builder builderForValue) {
        if (cntnLocalizationStatusesBuilder_ == null) {
          ensureCntnLocalizationStatusesIsMutable();
          cntnLocalizationStatuses_.add(builderForValue.build());
          onChanged();
        } else {
          cntnLocalizationStatusesBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerLocalizationStatusesProto cntn_localization_statuses = 1;</code>
       */
      public Builder addCntnLocalizationStatuses(
          int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto.Builder builderForValue) {
        if (cntnLocalizationStatusesBuilder_ == null) {
          ensureCntnLocalizationStatusesIsMutable();
          cntnLocalizationStatuses_.add(index, builderForValue.build());
          onChanged();
        } else {
          cntnLocalizationStatusesBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerLocalizationStatusesProto cntn_localization_statuses = 1;</code>
       */
      public Builder addAllCntnLocalizationStatuses(
          java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto> values) {
        if (cntnLocalizationStatusesBuilder_ == null) {
          ensureCntnLocalizationStatusesIsMutable();
          org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
              values, cntnLocalizationStatuses_);
          onChanged();
        } else {
          cntnLocalizationStatusesBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerLocalizationStatusesProto cntn_localization_statuses = 1;</code>
       */
      public Builder clearCntnLocalizationStatuses() {
        if (cntnLocalizationStatusesBuilder_ == null) {
          cntnLocalizationStatuses_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000001);
          onChanged();
        } else {
          cntnLocalizationStatusesBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerLocalizationStatusesProto cntn_localization_statuses = 1;</code>
       */
      public Builder removeCntnLocalizationStatuses(int index) {
        if (cntnLocalizationStatusesBuilder_ == null) {
          ensureCntnLocalizationStatusesIsMutable();
          cntnLocalizationStatuses_.remove(index);
          onChanged();
        } else {
          cntnLocalizationStatusesBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerLocalizationStatusesProto cntn_localization_statuses = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto.Builder getCntnLocalizationStatusesBuilder(
          int index) {
        return getCntnLocalizationStatusesFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerLocalizationStatusesProto cntn_localization_statuses = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProtoOrBuilder getCntnLocalizationStatusesOrBuilder(
          int index) {
        if (cntnLocalizationStatusesBuilder_ == null) {
          return cntnLocalizationStatuses_.get(index);
        } else {
          return cntnLocalizationStatusesBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerLocalizationStatusesProto cntn_localization_statuses = 1;</code>
       */
      public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProtoOrBuilder> 
           getCntnLocalizationStatusesOrBuilderList() {
        if (cntnLocalizationStatusesBuilder_ != null) {
          return cntnLocalizationStatusesBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(cntnLocalizationStatuses_);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerLocalizationStatusesProto cntn_localization_statuses = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto.Builder addCntnLocalizationStatusesBuilder() {
        return getCntnLocalizationStatusesFieldBuilder().addBuilder(
            org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerLocalizationStatusesProto cntn_localization_statuses = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto.Builder addCntnLocalizationStatusesBuilder(
          int index) {
        return getCntnLocalizationStatusesFieldBuilder().addBuilder(
            index, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerLocalizationStatusesProto cntn_localization_statuses = 1;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto.Builder> 
           getCntnLocalizationStatusesBuilderList() {
        return getCntnLocalizationStatusesFieldBuilder().getBuilderList();
      }
      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto.Builder, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProtoOrBuilder> 
          getCntnLocalizationStatusesFieldBuilder() {
        if (cntnLocalizationStatusesBuilder_ == null) {
          cntnLocalizationStatusesBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto.Builder, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProtoOrBuilder>(
                  cntnLocalizationStatuses_,
                  ((bitField0_ & 0x00000001) != 0),
                  getParentForChildren(),
                  isClean());
          cntnLocalizationStatuses_ = null;
        }
        return cntnLocalizationStatusesBuilder_;
      }
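      // Implementation note: the repeated field keeps one of two
      // representations. Until a builder accessor is requested, elements live
      // in the plain cntnLocalizationStatuses_ list guarded by the 0x00000001
      // bit; once this method creates the RepeatedFieldBuilderV3, ownership
      // of the elements moves into it and the list reference is nulled out,
      // which is why every accessor above branches on whether the field
      // builder exists.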

      private java.util.List<org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto> failedRequests_ =
        java.util.Collections.emptyList();
      private void ensureFailedRequestsIsMutable() {
        if (!((bitField0_ & 0x00000002) != 0)) {
          failedRequests_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto>(failedRequests_);
          bitField0_ |= 0x00000002;
        }
      }

      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProtoOrBuilder> failedRequestsBuilder_;

      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto> getFailedRequestsList() {
        if (failedRequestsBuilder_ == null) {
          return java.util.Collections.unmodifiableList(failedRequests_);
        } else {
          return failedRequestsBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
       */
      public int getFailedRequestsCount() {
        if (failedRequestsBuilder_ == null) {
          return failedRequests_.size();
        } else {
          return failedRequestsBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto getFailedRequests(int index) {
        if (failedRequestsBuilder_ == null) {
          return failedRequests_.get(index);
        } else {
          return failedRequestsBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
       */
      public Builder setFailedRequests(
          int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto value) {
        if (failedRequestsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureFailedRequestsIsMutable();
          failedRequests_.set(index, value);
          onChanged();
        } else {
          failedRequestsBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
       */
      public Builder setFailedRequests(
          int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder builderForValue) {
        if (failedRequestsBuilder_ == null) {
          ensureFailedRequestsIsMutable();
          failedRequests_.set(index, builderForValue.build());
          onChanged();
        } else {
          failedRequestsBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
       */
      public Builder addFailedRequests(org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto value) {
        if (failedRequestsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureFailedRequestsIsMutable();
          failedRequests_.add(value);
          onChanged();
        } else {
          failedRequestsBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
       */
      public Builder addFailedRequests(
          int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto value) {
        if (failedRequestsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureFailedRequestsIsMutable();
          failedRequests_.add(index, value);
          onChanged();
        } else {
          failedRequestsBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
       */
      public Builder addFailedRequests(
          org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder builderForValue) {
        if (failedRequestsBuilder_ == null) {
          ensureFailedRequestsIsMutable();
          failedRequests_.add(builderForValue.build());
          onChanged();
        } else {
          failedRequestsBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
       */
      public Builder addFailedRequests(
          int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder builderForValue) {
        if (failedRequestsBuilder_ == null) {
          ensureFailedRequestsIsMutable();
          failedRequests_.add(index, builderForValue.build());
          onChanged();
        } else {
          failedRequestsBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
       */
      public Builder addAllFailedRequests(
          java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto> values) {
        if (failedRequestsBuilder_ == null) {
          ensureFailedRequestsIsMutable();
          org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
              values, failedRequests_);
          onChanged();
        } else {
          failedRequestsBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
       */
      public Builder clearFailedRequests() {
        if (failedRequestsBuilder_ == null) {
          failedRequests_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000002);
          onChanged();
        } else {
          failedRequestsBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
       */
      public Builder removeFailedRequests(int index) {
        if (failedRequestsBuilder_ == null) {
          ensureFailedRequestsIsMutable();
          failedRequests_.remove(index);
          onChanged();
        } else {
          failedRequestsBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder getFailedRequestsBuilder(
          int index) {
        return getFailedRequestsFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProtoOrBuilder getFailedRequestsOrBuilder(
          int index) {
        if (failedRequestsBuilder_ == null) {
          return failedRequests_.get(index);
        } else {
          return failedRequestsBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
       */
      public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProtoOrBuilder> 
           getFailedRequestsOrBuilderList() {
        if (failedRequestsBuilder_ != null) {
          return failedRequestsBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(failedRequests_);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder addFailedRequestsBuilder() {
        return getFailedRequestsFieldBuilder().addBuilder(
            org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder addFailedRequestsBuilder(
          int index) {
        return getFailedRequestsFieldBuilder().addBuilder(
            index, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder> 
           getFailedRequestsBuilderList() {
        return getFailedRequestsFieldBuilder().getBuilderList();
      }
      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProtoOrBuilder> 
          getFailedRequestsFieldBuilder() {
        if (failedRequestsBuilder_ == null) {
          failedRequestsBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProtoOrBuilder>(
                  failedRequests_,
                  ((bitField0_ & 0x00000002) != 0),
                  getParentForChildren(),
                  isClean());
          failedRequests_ = null;
        }
        return failedRequestsBuilder_;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.GetLocalizationStatusesResponseProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.GetLocalizationStatusesResponseProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesResponseProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesResponseProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesResponseProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<GetLocalizationStatusesResponseProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<GetLocalizationStatusesResponseProto>() {
      @java.lang.Override
      public GetLocalizationStatusesResponseProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
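    // The raw PARSER field is deprecated in favor of the parser() accessor
    // below; the anonymous parser simply drives Builder#mergeFrom over the
    // input and, on failure, attaches whatever was read so far via
    // setUnfinishedMessage(buildPartial()).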

    public static org.apache.hadoop.thirdparty.protobuf.Parser<GetLocalizationStatusesResponseProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<GetLocalizationStatusesResponseProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesResponseProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
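  /*
   * Illustrative sketch (not part of the generated file): assembling and
   * round-tripping a response with the builder API above. The status message
   * added here is just the default instance.
   *
   *   GetLocalizationStatusesResponseProto resp =
   *       GetLocalizationStatusesResponseProto.newBuilder()
   *           .addCntnLocalizationStatuses(
   *               ContainerLocalizationStatusesProto.getDefaultInstance())
   *           .build();
   *   byte[] wire = resp.toByteArray();
   *   GetLocalizationStatusesResponseProto parsed =
   *       GetLocalizationStatusesResponseProto.parseFrom(wire);
   */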

  public interface LocalizationStatusProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.LocalizationStatusProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>optional string resource_key = 1;</code>
     * @return Whether the resourceKey field is set.
     */
    boolean hasResourceKey();
    /**
     * <code>optional string resource_key = 1;</code>
     * @return The resourceKey.
     */
    java.lang.String getResourceKey();
    /**
     * <code>optional string resource_key = 1;</code>
     * @return The bytes for resourceKey.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getResourceKeyBytes();

    /**
     * <code>optional .hadoop.yarn.LocalizationStateProto localization_state = 2;</code>
     * @return Whether the localizationState field is set.
     */
    boolean hasLocalizationState();
    /**
     * <code>optional .hadoop.yarn.LocalizationStateProto localization_state = 2;</code>
     * @return The localizationState.
     */
    org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStateProto getLocalizationState();

    /**
     * <code>optional string diagnostics = 3;</code>
     * @return Whether the diagnostics field is set.
     */
    boolean hasDiagnostics();
    /**
     * <code>optional string diagnostics = 3;</code>
     * @return The diagnostics.
     */
    java.lang.String getDiagnostics();
    /**
     * <code>optional string diagnostics = 3;</code>
     * @return The bytes for diagnostics.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getDiagnosticsBytes();
  }
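  /*
   * For orientation, the accessors above correspond to a proto2 message of
   * roughly this shape (reconstructed from the generated field comments; the
   * authoritative definition is in yarn_service_protos.proto):
   *
   *   message LocalizationStatusProto {
   *     optional string resource_key = 1;
   *     optional LocalizationStateProto localization_state = 2;
   *     optional string diagnostics = 3;
   *   }
   */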
  /**
   * Protobuf type {@code hadoop.yarn.LocalizationStatusProto}
   */
  public static final class LocalizationStatusProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.LocalizationStatusProto)
      LocalizationStatusProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use LocalizationStatusProto.newBuilder() to construct.
    private LocalizationStatusProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private LocalizationStatusProto() {
      resourceKey_ = "";
      localizationState_ = 1;
      diagnostics_ = "";
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new LocalizationStatusProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_LocalizationStatusProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_LocalizationStatusProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto.Builder.class);
    }

    private int bitField0_;
    public static final int RESOURCE_KEY_FIELD_NUMBER = 1;
    @SuppressWarnings("serial")
    private volatile java.lang.Object resourceKey_ = "";
    /**
     * <code>optional string resource_key = 1;</code>
     * @return Whether the resourceKey field is set.
     */
    @java.lang.Override
    public boolean hasResourceKey() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional string resource_key = 1;</code>
     * @return The resourceKey.
     */
    @java.lang.Override
    public java.lang.String getResourceKey() {
      java.lang.Object ref = resourceKey_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          resourceKey_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string resource_key = 1;</code>
     * @return The bytes for resourceKey.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getResourceKeyBytes() {
      java.lang.Object ref = resourceKey_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        resourceKey_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }
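    // The two accessors above implement protobuf's lazy string caching: the
    // field is stored as an Object that is either a String or a ByteString.
    // A value parsed off the wire starts life as a ByteString; the first
    // getResourceKey() call decodes it and caches the String (only when the
    // bytes are valid UTF-8), while getResourceKeyBytes() performs the
    // reverse conversion and caches the ByteString.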

    public static final int LOCALIZATION_STATE_FIELD_NUMBER = 2;
    private int localizationState_ = 1;
    /**
     * <code>optional .hadoop.yarn.LocalizationStateProto localization_state = 2;</code>
     * @return Whether the localizationState field is set.
     */
    @java.lang.Override public boolean hasLocalizationState() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.LocalizationStateProto localization_state = 2;</code>
     * @return The localizationState.
     */
    @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStateProto getLocalizationState() {
      org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStateProto result = org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStateProto.forNumber(localizationState_);
      return result == null ? org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStateProto.L_PENDING : result;
    }

    public static final int DIAGNOSTICS_FIELD_NUMBER = 3;
    @SuppressWarnings("serial")
    private volatile java.lang.Object diagnostics_ = "";
    /**
     * <code>optional string diagnostics = 3;</code>
     * @return Whether the diagnostics field is set.
     */
    @java.lang.Override
    public boolean hasDiagnostics() {
      return ((bitField0_ & 0x00000004) != 0);
    }
    /**
     * <code>optional string diagnostics = 3;</code>
     * @return The diagnostics.
     */
    @java.lang.Override
    public java.lang.String getDiagnostics() {
      java.lang.Object ref = diagnostics_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          diagnostics_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string diagnostics = 3;</code>
     * @return The bytes for diagnostics.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getDiagnosticsBytes() {
      java.lang.Object ref = diagnostics_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        diagnostics_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, resourceKey_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        output.writeEnum(2, localizationState_);
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 3, diagnostics_);
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, resourceKey_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeEnumSize(2, localizationState_);
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(3, diagnostics_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }
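    // getSerializedSize() is memoized: memoizedSize starts at -1 and, once
    // computed, the byte count (field tags plus payloads plus any unknown
    // fields) is cached, which is safe because message instances are
    // immutable after construction.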

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto) obj;

      if (hasResourceKey() != other.hasResourceKey()) return false;
      if (hasResourceKey()) {
        if (!getResourceKey()
            .equals(other.getResourceKey())) return false;
      }
      if (hasLocalizationState() != other.hasLocalizationState()) return false;
      if (hasLocalizationState()) {
        if (localizationState_ != other.localizationState_) return false;
      }
      if (hasDiagnostics() != other.hasDiagnostics()) return false;
      if (hasDiagnostics()) {
        if (!getDiagnostics()
            .equals(other.getDiagnostics())) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasResourceKey()) {
        hash = (37 * hash) + RESOURCE_KEY_FIELD_NUMBER;
        hash = (53 * hash) + getResourceKey().hashCode();
      }
      if (hasLocalizationState()) {
        hash = (37 * hash) + LOCALIZATION_STATE_FIELD_NUMBER;
        hash = (53 * hash) + localizationState_;
      }
      if (hasDiagnostics()) {
        hash = (37 * hash) + DIAGNOSTICS_FIELD_NUMBER;
        hash = (53 * hash) + getDiagnostics().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.LocalizationStatusProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.LocalizationStatusProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_LocalizationStatusProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_LocalizationStatusProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto.newBuilder()
      private Builder() {

      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);

      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        resourceKey_ = "";
        localizationState_ = 1;
        diagnostics_ = "";
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_LocalizationStatusProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.resourceKey_ = resourceKey_;
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.localizationState_ = localizationState_;
          to_bitField0_ |= 0x00000002;
        }
        if (((from_bitField0_ & 0x00000004) != 0)) {
          result.diagnostics_ = diagnostics_;
          to_bitField0_ |= 0x00000004;
        }
        result.bitField0_ |= to_bitField0_;
      }
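      // buildPartial0 copies only the fields whose builder has-bits are set
      // and ORs the corresponding bits into the result's bitField0_, so the
      // built message reports hasResourceKey(), hasLocalizationState(), and
      // hasDiagnostics() exactly for the fields populated here.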

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto.getDefaultInstance()) return this;
        if (other.hasResourceKey()) {
          resourceKey_ = other.resourceKey_;
          bitField0_ |= 0x00000001;
          onChanged();
        }
        if (other.hasLocalizationState()) {
          setLocalizationState(other.getLocalizationState());
        }
        if (other.hasDiagnostics()) {
          diagnostics_ = other.diagnostics_;
          bitField0_ |= 0x00000004;
          onChanged();
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                resourceKey_ = input.readBytes();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
              case 16: {
                int tmpRaw = input.readEnum();
                org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStateProto tmpValue =
                    org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStateProto.forNumber(tmpRaw);
                if (tmpValue == null) {
                  mergeUnknownVarintField(2, tmpRaw);
                } else {
                  localizationState_ = tmpRaw;
                  bitField0_ |= 0x00000002;
                }
                break;
              } // case 16
              case 26: {
                diagnostics_ = input.readBytes();
                bitField0_ |= 0x00000004;
                break;
              } // case 26
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
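      // Case 16 above is field 2 as a varint ((2 << 3) | 0 = 16). Because
      // this is a proto2 enum, a numeric value with no matching
      // LocalizationStateProto constant is not stored in the field; it is
      // preserved in the unknown field set via mergeUnknownVarintField so the
      // bytes survive a re-serialization round trip.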
      private int bitField0_;

      private java.lang.Object resourceKey_ = "";
      /**
       * <code>optional string resource_key = 1;</code>
       * @return Whether the resourceKey field is set.
       */
      public boolean hasResourceKey() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>optional string resource_key = 1;</code>
       * @return The resourceKey.
       */
      public java.lang.String getResourceKey() {
        java.lang.Object ref = resourceKey_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            resourceKey_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string resource_key = 1;</code>
       * @return The bytes for resourceKey.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getResourceKeyBytes() {
        java.lang.Object ref = resourceKey_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          resourceKey_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string resource_key = 1;</code>
       * @param value The resourceKey to set.
       * @return This builder for chaining.
       */
      public Builder setResourceKey(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        resourceKey_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional string resource_key = 1;</code>
       * @return This builder for chaining.
       */
      public Builder clearResourceKey() {
        resourceKey_ = getDefaultInstance().getResourceKey();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
        return this;
      }
      /**
       * <code>optional string resource_key = 1;</code>
       * @param value The bytes for resourceKey to set.
       * @return This builder for chaining.
       */
      public Builder setResourceKeyBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        resourceKey_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }

      private int localizationState_ = 1;
      /**
       * <code>optional .hadoop.yarn.LocalizationStateProto localization_state = 2;</code>
       * @return Whether the localizationState field is set.
       */
      @java.lang.Override public boolean hasLocalizationState() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.LocalizationStateProto localization_state = 2;</code>
       * @return The localizationState.
       */
      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStateProto getLocalizationState() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStateProto result = org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStateProto.forNumber(localizationState_);
        return result == null ? org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStateProto.L_PENDING : result;
      }
      /**
       * <code>optional .hadoop.yarn.LocalizationStateProto localization_state = 2;</code>
       * @param value The localizationState to set.
       * @return This builder for chaining.
       */
      public Builder setLocalizationState(org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStateProto value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000002;
        localizationState_ = value.getNumber();
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.LocalizationStateProto localization_state = 2;</code>
       * @return This builder for chaining.
       */
      public Builder clearLocalizationState() {
        bitField0_ = (bitField0_ & ~0x00000002);
        localizationState_ = 1;
        onChanged();
        return this;
      }

      private java.lang.Object diagnostics_ = "";
      /**
       * <code>optional string diagnostics = 3;</code>
       * @return Whether the diagnostics field is set.
       */
      public boolean hasDiagnostics() {
        return ((bitField0_ & 0x00000004) != 0);
      }
      /**
       * <code>optional string diagnostics = 3;</code>
       * @return The diagnostics.
       */
      public java.lang.String getDiagnostics() {
        java.lang.Object ref = diagnostics_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            diagnostics_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string diagnostics = 3;</code>
       * @return The bytes for diagnostics.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getDiagnosticsBytes() {
        java.lang.Object ref = diagnostics_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          diagnostics_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string diagnostics = 3;</code>
       * @param value The diagnostics to set.
       * @return This builder for chaining.
       */
      public Builder setDiagnostics(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        diagnostics_ = value;
        bitField0_ |= 0x00000004;
        onChanged();
        return this;
      }
      /**
       * <code>optional string diagnostics = 3;</code>
       * @return This builder for chaining.
       */
      public Builder clearDiagnostics() {
        diagnostics_ = getDefaultInstance().getDiagnostics();
        bitField0_ = (bitField0_ & ~0x00000004);
        onChanged();
        return this;
      }
      /**
       * <code>optional string diagnostics = 3;</code>
       * @param value The bytes for diagnostics to set.
       * @return This builder for chaining.
       */
      public Builder setDiagnosticsBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        diagnostics_ = value;
        bitField0_ |= 0x00000004;
        onChanged();
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.LocalizationStatusProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.LocalizationStatusProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<LocalizationStatusProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<LocalizationStatusProto>() {
      @java.lang.Override
      public LocalizationStatusProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<LocalizationStatusProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<LocalizationStatusProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
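  // Usage sketch (illustrative, not generated code): building a status and
  // round-tripping it through the parser. The diagnostic text below is an
  // assumption for demonstration only.
  //
  //   LocalizationStatusProto status = LocalizationStatusProto.newBuilder()
  //       .setDiagnostics("resource fetch completed")  // optional string, field 3
  //       .build();
  //   byte[] bytes = status.toByteArray();
  //   LocalizationStatusProto parsed =
  //       LocalizationStatusProto.parser().parseFrom(bytes);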

  public interface ContainerLocalizationStatusesProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.ContainerLocalizationStatusesProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
     * @return Whether the containerId field is set.
     */
    boolean hasContainerId();
    /**
     * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
     * @return The containerId.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getContainerId();
    /**
     * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getContainerIdOrBuilder();

    /**
     * <code>repeated .hadoop.yarn.LocalizationStatusProto localization_statuses = 2;</code>
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto> 
        getLocalizationStatusesList();
    /**
     * <code>repeated .hadoop.yarn.LocalizationStatusProto localization_statuses = 2;</code>
     */
    org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto getLocalizationStatuses(int index);
    /**
     * <code>repeated .hadoop.yarn.LocalizationStatusProto localization_statuses = 2;</code>
     */
    int getLocalizationStatusesCount();
    /**
     * <code>repeated .hadoop.yarn.LocalizationStatusProto localization_statuses = 2;</code>
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProtoOrBuilder> 
        getLocalizationStatusesOrBuilderList();
    /**
     * <code>repeated .hadoop.yarn.LocalizationStatusProto localization_statuses = 2;</code>
     */
    org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProtoOrBuilder getLocalizationStatusesOrBuilder(
        int index);
  }
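  // For the repeated field, the interface exposes the standard generated
  // accessor family: the full List, an indexed getter, a count, and
  // OrBuilder views that let callers read pending Builder state without
  // forcing a build().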
  /**
   * Protobuf type {@code hadoop.yarn.ContainerLocalizationStatusesProto}
   */
  public static final class ContainerLocalizationStatusesProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.ContainerLocalizationStatusesProto)
      ContainerLocalizationStatusesProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use ContainerLocalizationStatusesProto.newBuilder() to construct.
    private ContainerLocalizationStatusesProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private ContainerLocalizationStatusesProto() {
      localizationStatuses_ = java.util.Collections.emptyList();
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new ContainerLocalizationStatusesProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ContainerLocalizationStatusesProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ContainerLocalizationStatusesProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto.Builder.class);
    }

    private int bitField0_;
    public static final int CONTAINER_ID_FIELD_NUMBER = 1;
    private org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto containerId_;
    /**
     * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
     * @return Whether the containerId field is set.
     */
    @java.lang.Override
    public boolean hasContainerId() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
     * @return The containerId.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getContainerId() {
      return containerId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : containerId_;
    }
    /**
     * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getContainerIdOrBuilder() {
      return containerId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : containerId_;
    }

    public static final int LOCALIZATION_STATUSES_FIELD_NUMBER = 2;
    @SuppressWarnings("serial")
    private java.util.List<org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto> localizationStatuses_;
    /**
     * <code>repeated .hadoop.yarn.LocalizationStatusProto localization_statuses = 2;</code>
     */
    @java.lang.Override
    public java.util.List<org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto> getLocalizationStatusesList() {
      return localizationStatuses_;
    }
    /**
     * <code>repeated .hadoop.yarn.LocalizationStatusProto localization_statuses = 2;</code>
     */
    @java.lang.Override
    public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProtoOrBuilder> 
        getLocalizationStatusesOrBuilderList() {
      return localizationStatuses_;
    }
    /**
     * <code>repeated .hadoop.yarn.LocalizationStatusProto localization_statuses = 2;</code>
     */
    @java.lang.Override
    public int getLocalizationStatusesCount() {
      return localizationStatuses_.size();
    }
    /**
     * <code>repeated .hadoop.yarn.LocalizationStatusProto localization_statuses = 2;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto getLocalizationStatuses(int index) {
      return localizationStatuses_.get(index);
    }
    /**
     * <code>repeated .hadoop.yarn.LocalizationStatusProto localization_statuses = 2;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProtoOrBuilder getLocalizationStatusesOrBuilder(
        int index) {
      return localizationStatuses_.get(index);
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }
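    // Both fields are optional/repeated and the nested types carry no
    // required fields, so initialization always succeeds; the memoized byte
    // (-1 unknown, 0 false, 1 true) just avoids rechecking.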

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeMessage(1, getContainerId());
      }
      for (int i = 0; i < localizationStatuses_.size(); i++) {
        output.writeMessage(2, localizationStatuses_.get(i));
      }
      getUnknownFields().writeTo(output);
    }
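    // Wire layout: field 1 is emitted only when its presence bit is set;
    // each element of the repeated field 2 is written as its own
    // length-delimited record, so element order is preserved on the wire.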

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, getContainerId());
      }
      for (int i = 0; i < localizationStatuses_.size(); i++) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(2, localizationStatuses_.get(i));
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto) obj;

      if (hasContainerId() != other.hasContainerId()) return false;
      if (hasContainerId()) {
        if (!getContainerId()
            .equals(other.getContainerId())) return false;
      }
      if (!getLocalizationStatusesList()
          .equals(other.getLocalizationStatusesList())) return false;
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasContainerId()) {
        hash = (37 * hash) + CONTAINER_ID_FIELD_NUMBER;
        hash = (53 * hash) + getContainerId().hashCode();
      }
      if (getLocalizationStatusesCount() > 0) {
        hash = (37 * hash) + LOCALIZATION_STATUSES_FIELD_NUMBER;
        hash = (53 * hash) + getLocalizationStatusesList().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
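    // The hash folds in the descriptor plus each present field using the
    // usual 37/53 multipliers, and is memoized, which is safe because the
    // message is immutable once built.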

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }
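    // Delimited-stream sketch (illustrative, not generated code):
    // parseDelimitedFrom expects a varint length prefix, so it pairs with
    // writeDelimitedTo when several messages share one stream. "out", "in",
    // and "msg" are assumed locals.
    //
    //   msg.writeDelimitedTo(out);
    //   ContainerLocalizationStatusesProto next =
    //       ContainerLocalizationStatusesProto.parseDelimitedFrom(in);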

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.ContainerLocalizationStatusesProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.ContainerLocalizationStatusesProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ContainerLocalizationStatusesProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ContainerLocalizationStatusesProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
          getContainerIdFieldBuilder();
          getLocalizationStatusesFieldBuilder();
        }
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        containerId_ = null;
        if (containerIdBuilder_ != null) {
          containerIdBuilder_.dispose();
          containerIdBuilder_ = null;
        }
        if (localizationStatusesBuilder_ == null) {
          localizationStatuses_ = java.util.Collections.emptyList();
        } else {
          localizationStatuses_ = null;
          localizationStatusesBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000002);
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ContainerLocalizationStatusesProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto(this);
        buildPartialRepeatedFields(result);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartialRepeatedFields(org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto result) {
        if (localizationStatusesBuilder_ == null) {
          if (((bitField0_ & 0x00000002) != 0)) {
            localizationStatuses_ = java.util.Collections.unmodifiableList(localizationStatuses_);
            bitField0_ = (bitField0_ & ~0x00000002);
          }
          result.localizationStatuses_ = localizationStatuses_;
        } else {
          result.localizationStatuses_ = localizationStatusesBuilder_.build();
        }
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.containerId_ = containerIdBuilder_ == null
              ? containerId_
              : containerIdBuilder_.build();
          to_bitField0_ |= 0x00000001;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto.getDefaultInstance()) return this;
        if (other.hasContainerId()) {
          mergeContainerId(other.getContainerId());
        }
        if (localizationStatusesBuilder_ == null) {
          if (!other.localizationStatuses_.isEmpty()) {
            if (localizationStatuses_.isEmpty()) {
              localizationStatuses_ = other.localizationStatuses_;
              bitField0_ = (bitField0_ & ~0x00000002);
            } else {
              ensureLocalizationStatusesIsMutable();
              localizationStatuses_.addAll(other.localizationStatuses_);
            }
            onChanged();
          }
        } else {
          if (!other.localizationStatuses_.isEmpty()) {
            if (localizationStatusesBuilder_.isEmpty()) {
              localizationStatusesBuilder_.dispose();
              localizationStatusesBuilder_ = null;
              localizationStatuses_ = other.localizationStatuses_;
              bitField0_ = (bitField0_ & ~0x00000002);
              localizationStatusesBuilder_ = 
                org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                   getLocalizationStatusesFieldBuilder() : null;
            } else {
              localizationStatusesBuilder_.addAllMessages(other.localizationStatuses_);
            }
          }
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }
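      // Merge semantics mirror protobuf defaults: the singular containerId
      // is merged field-by-field, while localization_statuses is
      // concatenated. When this builder's list is still empty it adopts the
      // other message's immutable list directly;
      // ensureLocalizationStatusesIsMutable() copies it before any later
      // mutation.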

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                input.readMessage(
                    getContainerIdFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
              case 18: {
                org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto m =
                    input.readMessage(
                        org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto.PARSER,
                        extensionRegistry);
                if (localizationStatusesBuilder_ == null) {
                  ensureLocalizationStatusesIsMutable();
                  localizationStatuses_.add(m);
                } else {
                  localizationStatusesBuilder_.addMessage(m);
                }
                break;
              } // case 18
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
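      // Tag decoding: tag == (field_number << 3) | wire_type, so case 10 is
      // field 1 and case 18 is field 2, both length-delimited (wire type 2);
      // tag 0 marks end of input.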
      private int bitField0_;

      private org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto containerId_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> containerIdBuilder_;
      /**
       * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       * @return Whether the containerId field is set.
       */
      public boolean hasContainerId() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       * @return The containerId.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getContainerId() {
        if (containerIdBuilder_ == null) {
          return containerId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : containerId_;
        } else {
          return containerIdBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public Builder setContainerId(org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) {
        if (containerIdBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          containerId_ = value;
        } else {
          containerIdBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public Builder setContainerId(
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder builderForValue) {
        if (containerIdBuilder_ == null) {
          containerId_ = builderForValue.build();
        } else {
          containerIdBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public Builder mergeContainerId(org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) {
        if (containerIdBuilder_ == null) {
          if (((bitField0_ & 0x00000001) != 0) &&
            containerId_ != null &&
            containerId_ != org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance()) {
            getContainerIdBuilder().mergeFrom(value);
          } else {
            containerId_ = value;
          }
        } else {
          containerIdBuilder_.mergeFrom(value);
        }
        if (containerId_ != null) {
          bitField0_ |= 0x00000001;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public Builder clearContainerId() {
        bitField0_ = (bitField0_ & ~0x00000001);
        containerId_ = null;
        if (containerIdBuilder_ != null) {
          containerIdBuilder_.dispose();
          containerIdBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder getContainerIdBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getContainerIdFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getContainerIdOrBuilder() {
        if (containerIdBuilder_ != null) {
          return containerIdBuilder_.getMessageOrBuilder();
        } else {
          return containerId_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : containerId_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> 
          getContainerIdFieldBuilder() {
        if (containerIdBuilder_ == null) {
          containerIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder>(
                  getContainerId(),
                  getParentForChildren(),
                  isClean());
          containerId_ = null;
        }
        return containerIdBuilder_;
      }
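      // The SingleFieldBuilderV3 is created lazily; once it exists,
      // containerId_ is nulled and the builder becomes the single source of
      // truth for the field.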

      private java.util.List<org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto> localizationStatuses_ =
        java.util.Collections.emptyList();
      private void ensureLocalizationStatusesIsMutable() {
        if (!((bitField0_ & 0x00000002) != 0)) {
          localizationStatuses_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto>(localizationStatuses_);
          bitField0_ |= 0x00000002;
        }
      }

      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto, org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto.Builder, org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProtoOrBuilder> localizationStatusesBuilder_;

      /**
       * <code>repeated .hadoop.yarn.LocalizationStatusProto localization_statuses = 2;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto> getLocalizationStatusesList() {
        if (localizationStatusesBuilder_ == null) {
          return java.util.Collections.unmodifiableList(localizationStatuses_);
        } else {
          return localizationStatusesBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.LocalizationStatusProto localization_statuses = 2;</code>
       */
      public int getLocalizationStatusesCount() {
        if (localizationStatusesBuilder_ == null) {
          return localizationStatuses_.size();
        } else {
          return localizationStatusesBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.LocalizationStatusProto localization_statuses = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto getLocalizationStatuses(int index) {
        if (localizationStatusesBuilder_ == null) {
          return localizationStatuses_.get(index);
        } else {
          return localizationStatusesBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.LocalizationStatusProto localization_statuses = 2;</code>
       */
      public Builder setLocalizationStatuses(
          int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto value) {
        if (localizationStatusesBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureLocalizationStatusesIsMutable();
          localizationStatuses_.set(index, value);
          onChanged();
        } else {
          localizationStatusesBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.LocalizationStatusProto localization_statuses = 2;</code>
       */
      public Builder setLocalizationStatuses(
          int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto.Builder builderForValue) {
        if (localizationStatusesBuilder_ == null) {
          ensureLocalizationStatusesIsMutable();
          localizationStatuses_.set(index, builderForValue.build());
          onChanged();
        } else {
          localizationStatusesBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.LocalizationStatusProto localization_statuses = 2;</code>
       */
      public Builder addLocalizationStatuses(org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto value) {
        if (localizationStatusesBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureLocalizationStatusesIsMutable();
          localizationStatuses_.add(value);
          onChanged();
        } else {
          localizationStatusesBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.LocalizationStatusProto localization_statuses = 2;</code>
       */
      public Builder addLocalizationStatuses(
          int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto value) {
        if (localizationStatusesBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureLocalizationStatusesIsMutable();
          localizationStatuses_.add(index, value);
          onChanged();
        } else {
          localizationStatusesBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.LocalizationStatusProto localization_statuses = 2;</code>
       */
      public Builder addLocalizationStatuses(
          org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto.Builder builderForValue) {
        if (localizationStatusesBuilder_ == null) {
          ensureLocalizationStatusesIsMutable();
          localizationStatuses_.add(builderForValue.build());
          onChanged();
        } else {
          localizationStatusesBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.LocalizationStatusProto localization_statuses = 2;</code>
       */
      public Builder addLocalizationStatuses(
          int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto.Builder builderForValue) {
        if (localizationStatusesBuilder_ == null) {
          ensureLocalizationStatusesIsMutable();
          localizationStatuses_.add(index, builderForValue.build());
          onChanged();
        } else {
          localizationStatusesBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.LocalizationStatusProto localization_statuses = 2;</code>
       */
      public Builder addAllLocalizationStatuses(
          java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto> values) {
        if (localizationStatusesBuilder_ == null) {
          ensureLocalizationStatusesIsMutable();
          org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
              values, localizationStatuses_);
          onChanged();
        } else {
          localizationStatusesBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.LocalizationStatusProto localization_statuses = 2;</code>
       */
      public Builder clearLocalizationStatuses() {
        if (localizationStatusesBuilder_ == null) {
          localizationStatuses_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000002);
          onChanged();
        } else {
          localizationStatusesBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.LocalizationStatusProto localization_statuses = 2;</code>
       */
      public Builder removeLocalizationStatuses(int index) {
        if (localizationStatusesBuilder_ == null) {
          ensureLocalizationStatusesIsMutable();
          localizationStatuses_.remove(index);
          onChanged();
        } else {
          localizationStatusesBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.LocalizationStatusProto localization_statuses = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto.Builder getLocalizationStatusesBuilder(
          int index) {
        return getLocalizationStatusesFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .hadoop.yarn.LocalizationStatusProto localization_statuses = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProtoOrBuilder getLocalizationStatusesOrBuilder(
          int index) {
        if (localizationStatusesBuilder_ == null) {
          return localizationStatuses_.get(index);
        } else {
          return localizationStatusesBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.LocalizationStatusProto localization_statuses = 2;</code>
       */
      public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProtoOrBuilder> 
           getLocalizationStatusesOrBuilderList() {
        if (localizationStatusesBuilder_ != null) {
          return localizationStatusesBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(localizationStatuses_);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.LocalizationStatusProto localization_statuses = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto.Builder addLocalizationStatusesBuilder() {
        return getLocalizationStatusesFieldBuilder().addBuilder(
            org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.LocalizationStatusProto localization_statuses = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto.Builder addLocalizationStatusesBuilder(
          int index) {
        return getLocalizationStatusesFieldBuilder().addBuilder(
            index, org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.LocalizationStatusProto localization_statuses = 2;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto.Builder> 
           getLocalizationStatusesBuilderList() {
        return getLocalizationStatusesFieldBuilder().getBuilderList();
      }
      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto, org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto.Builder, org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProtoOrBuilder> 
          getLocalizationStatusesFieldBuilder() {
        if (localizationStatusesBuilder_ == null) {
          localizationStatusesBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto, org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto.Builder, org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProtoOrBuilder>(
                  localizationStatuses_,
                  ((bitField0_ & 0x00000002) != 0),
                  getParentForChildren(),
                  isClean());
          localizationStatuses_ = null;
        }
        return localizationStatusesBuilder_;
      }
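      // Same lazy pattern for the repeated field: after the
      // RepeatedFieldBuilderV3 takes ownership, localizationStatuses_ is
      // nulled and all list access goes through the builder.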
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.ContainerLocalizationStatusesProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.ContainerLocalizationStatusesProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<ContainerLocalizationStatusesProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<ContainerLocalizationStatusesProto>() {
      @java.lang.Override
      public ContainerLocalizationStatusesProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<ContainerLocalizationStatusesProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<ContainerLocalizationStatusesProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
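  // Usage sketch (illustrative, not generated code): composing the
  // container-level report from the pieces above. ContainerIdProto
  // construction is elided; "containerId" and "status" are assumed locals.
  //
  //   ContainerLocalizationStatusesProto report =
  //       ContainerLocalizationStatusesProto.newBuilder()
  //           .setContainerId(containerId)        // optional field 1
  //           .addLocalizationStatuses(status)    // repeated field 2
  //           .build();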

  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_RegisterApplicationMasterRequestProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_RegisterApplicationMasterRequestProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_RegisterApplicationMasterResponseProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_RegisterApplicationMasterResponseProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_FinishApplicationMasterRequestProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_FinishApplicationMasterRequestProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_FinishApplicationMasterResponseProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_FinishApplicationMasterResponseProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_UpdateContainerRequestProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_UpdateContainerRequestProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_UpdateContainerErrorProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_UpdateContainerErrorProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_AllocateRequestProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_AllocateRequestProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_NMTokenProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_NMTokenProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_UpdatedContainerProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_UpdatedContainerProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_EnhancedHeadroomProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_EnhancedHeadroomProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_AllocateResponseProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_AllocateResponseProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_GetNewApplicationRequestProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_GetNewApplicationRequestProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_GetNewApplicationResponseProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_GetNewApplicationResponseProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_GetApplicationReportRequestProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_GetApplicationReportRequestProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_GetApplicationReportResponseProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_GetApplicationReportResponseProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_SubmitApplicationRequestProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_SubmitApplicationRequestProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_SubmitApplicationResponseProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_SubmitApplicationResponseProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_FailApplicationAttemptRequestProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_FailApplicationAttemptRequestProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_FailApplicationAttemptResponseProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_FailApplicationAttemptResponseProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_KillApplicationRequestProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_KillApplicationRequestProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_KillApplicationResponseProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_KillApplicationResponseProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_GetClusterMetricsRequestProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_GetClusterMetricsRequestProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_GetClusterMetricsResponseProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_GetClusterMetricsResponseProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_MoveApplicationAcrossQueuesRequestProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_MoveApplicationAcrossQueuesRequestProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_MoveApplicationAcrossQueuesResponseProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_MoveApplicationAcrossQueuesResponseProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_GetApplicationsRequestProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_GetApplicationsRequestProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_GetApplicationsResponseProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_GetApplicationsResponseProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_GetClusterNodesRequestProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_GetClusterNodesRequestProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_GetClusterNodesResponseProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_GetClusterNodesResponseProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_GetQueueInfoRequestProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_GetQueueInfoRequestProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_GetQueueInfoResponseProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_GetQueueInfoResponseProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_GetQueueUserAclsInfoRequestProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_GetQueueUserAclsInfoRequestProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_GetQueueUserAclsInfoResponseProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_GetQueueUserAclsInfoResponseProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_GetNodesToLabelsRequestProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_GetNodesToLabelsRequestProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_GetNodesToLabelsResponseProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_GetNodesToLabelsResponseProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_GetLabelsToNodesRequestProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_GetLabelsToNodesRequestProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_GetLabelsToNodesResponseProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_GetLabelsToNodesResponseProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_GetClusterNodeLabelsRequestProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_GetClusterNodeLabelsRequestProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_GetClusterNodeLabelsResponseProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_GetClusterNodeLabelsResponseProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_GetClusterNodeAttributesRequestProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_GetClusterNodeAttributesRequestProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_GetClusterNodeAttributesResponseProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_GetClusterNodeAttributesResponseProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_GetAttributesToNodesRequestProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_GetAttributesToNodesRequestProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_GetAttributesToNodesResponseProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_GetAttributesToNodesResponseProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_GetNodesToAttributesRequestProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_GetNodesToAttributesRequestProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_GetNodesToAttributesResponseProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_GetNodesToAttributesResponseProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_UpdateApplicationPriorityRequestProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_UpdateApplicationPriorityRequestProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_UpdateApplicationPriorityResponseProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_UpdateApplicationPriorityResponseProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_SignalContainerRequestProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_SignalContainerRequestProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_SignalContainerResponseProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_SignalContainerResponseProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_UpdateApplicationTimeoutsRequestProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_UpdateApplicationTimeoutsRequestProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_UpdateApplicationTimeoutsResponseProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_UpdateApplicationTimeoutsResponseProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_GetAllResourceProfilesRequestProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_GetAllResourceProfilesRequestProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_GetAllResourceProfilesResponseProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_GetAllResourceProfilesResponseProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_GetResourceProfileRequestProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_GetResourceProfileRequestProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_GetResourceProfileResponseProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_GetResourceProfileResponseProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_GetAllResourceTypeInfoRequestProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_GetAllResourceTypeInfoRequestProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_GetAllResourceTypeInfoResponseProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_GetAllResourceTypeInfoResponseProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_StartContainerRequestProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_StartContainerRequestProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_StartContainerResponseProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_StartContainerResponseProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_StopContainerRequestProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_StopContainerRequestProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_StopContainerResponseProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_StopContainerResponseProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_ResourceLocalizationRequestProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_ResourceLocalizationRequestProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_ResourceLocalizationResponseProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_ResourceLocalizationResponseProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_ReInitializeContainerRequestProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_ReInitializeContainerRequestProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_ReInitializeContainerResponseProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_ReInitializeContainerResponseProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_RestartContainerResponseProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_RestartContainerResponseProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_RollbackResponseProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_RollbackResponseProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_CommitResponseProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_CommitResponseProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_StartContainersRequestProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_StartContainersRequestProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_ContainerExceptionMapProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_ContainerExceptionMapProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_StartContainersResponseProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_StartContainersResponseProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_StopContainersRequestProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_StopContainersRequestProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_StopContainersResponseProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_StopContainersResponseProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_GetContainerStatusesRequestProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_GetContainerStatusesRequestProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_GetContainerStatusesResponseProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_GetContainerStatusesResponseProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_IncreaseContainersResourceRequestProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_IncreaseContainersResourceRequestProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_IncreaseContainersResourceResponseProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_IncreaseContainersResourceResponseProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_ContainerUpdateRequestProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_ContainerUpdateRequestProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_ContainerUpdateResponseProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_ContainerUpdateResponseProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_GetApplicationAttemptReportRequestProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_GetApplicationAttemptReportRequestProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_GetApplicationAttemptReportResponseProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_GetApplicationAttemptReportResponseProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_GetApplicationAttemptsRequestProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_GetApplicationAttemptsRequestProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_GetApplicationAttemptsResponseProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_GetApplicationAttemptsResponseProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_GetContainerReportRequestProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_GetContainerReportRequestProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_GetContainerReportResponseProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_GetContainerReportResponseProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_GetContainersRequestProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_GetContainersRequestProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_GetContainersResponseProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_GetContainersResponseProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_UseSharedCacheResourceRequestProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_UseSharedCacheResourceRequestProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_UseSharedCacheResourceResponseProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_UseSharedCacheResourceResponseProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_ReleaseSharedCacheResourceRequestProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_ReleaseSharedCacheResourceRequestProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_ReleaseSharedCacheResourceResponseProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_ReleaseSharedCacheResourceResponseProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_GetNewReservationRequestProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_GetNewReservationRequestProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_GetNewReservationResponseProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_GetNewReservationResponseProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_ReservationSubmissionRequestProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_ReservationSubmissionRequestProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_ReservationSubmissionResponseProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_ReservationSubmissionResponseProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_ReservationUpdateRequestProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_ReservationUpdateRequestProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_ReservationUpdateResponseProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_ReservationUpdateResponseProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_ReservationDeleteRequestProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_ReservationDeleteRequestProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_ReservationDeleteResponseProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_ReservationDeleteResponseProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_ReservationListRequestProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_ReservationListRequestProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_ReservationListResponseProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_ReservationListResponseProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_RunSharedCacheCleanerTaskRequestProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_RunSharedCacheCleanerTaskRequestProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_RunSharedCacheCleanerTaskResponseProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_RunSharedCacheCleanerTaskResponseProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_GetLocalizationStatusesRequestProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_GetLocalizationStatusesRequestProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_GetLocalizationStatusesResponseProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_GetLocalizationStatusesResponseProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_LocalizationStatusProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_LocalizationStatusProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_ContainerLocalizationStatusesProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_ContainerLocalizationStatusesProto_fieldAccessorTable;
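
  // The fields above hold, for every message type declared in
  // yarn_service_protos.proto, the runtime Descriptor plus the
  // FieldAccessorTable that the generated message classes use for
  // reflective field access. They are assigned exactly once, in the
  // static initializer below, after the file descriptor is parsed.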

  public static org.apache.hadoop.thirdparty.protobuf.Descriptors.FileDescriptor
      getDescriptor() {
    return descriptor;
  }
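
  // Illustrative sketch only -- protoc does not emit this helper. It shows how
  // the FileDescriptor returned by getDescriptor() can drive reflective
  // message construction; the message and field names come from the
  // descriptor data embedded below, while the helper name itself is made up.
  @SuppressWarnings("unused")
  private static org.apache.hadoop.thirdparty.protobuf.DynamicMessage
      exampleDynamicKillRequest() {
    // Resolve the message type by its simple name within this file.
    org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor msgType =
        getDescriptor().findMessageTypeByName("KillApplicationRequestProto");
    // Build an instance reflectively instead of via the generated builder.
    return org.apache.hadoop.thirdparty.protobuf.DynamicMessage.newBuilder(msgType)
        .setField(msgType.findFieldByName("diagnostics"), "killed by admin")
        .build();
  }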
  private static  org.apache.hadoop.thirdparty.protobuf.Descriptors.FileDescriptor
      descriptor;
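  // The static initializer below embeds the compiled FileDescriptorProto for
  // yarn_service_protos.proto as a sequence of string literals (escaped
  // Latin-1 bytes of the serialized descriptor, split to respect Java's
  // per-constant size limits) and parses it, together with its dependencies,
  // into the FileDescriptor returned by getDescriptor().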
  static {
    java.lang.String[] descriptorData = {
      "\n\031yarn_service_protos.proto\022\013hadoop.yarn" +
      "\032\016Security.proto\032\021yarn_protos.proto\"\253\001\n%" +
      "RegisterApplicationMasterRequestProto\022\014\n" +
      "\004host\030\001 \001(\t\022\020\n\010rpc_port\030\002 \001(\005\022\024\n\014trackin" +
      "g_url\030\003 \001(\t\022L\n\025placement_constraints\030\004 \003" +
      "(\0132-.hadoop.yarn.PlacementConstraintMapE" +
      "ntryProto\"\243\004\n&RegisterApplicationMasterR" +
      "esponseProto\0225\n\021maximumCapability\030\001 \001(\0132" +
      "\032.hadoop.yarn.ResourceProto\022%\n\035client_to" +
      "_am_token_master_key\030\002 \001(\014\022=\n\020applicatio" +
      "n_ACLs\030\003 \003(\0132#.hadoop.yarn.ApplicationAC" +
      "LMapProto\022F\n!containers_from_previous_at" +
      "tempts\030\004 \003(\0132\033.hadoop.yarn.ContainerProt" +
      "o\022\r\n\005queue\030\005 \001(\t\022C\n nm_tokens_from_previ" +
      "ous_attempts\030\006 \003(\0132\031.hadoop.yarn.NMToken" +
      "Proto\022E\n\030scheduler_resource_types\030\007 \003(\0162" +
      "#.hadoop.yarn.SchedulerResourceTypes\022=\n\021" +
      "resource_profiles\030\010 \001(\0132\".hadoop.yarn.Re" +
      "sourceProfilesProto\022:\n\016resource_types\030\t " +
      "\003(\0132\".hadoop.yarn.ResourceTypeInfoProto\"" +
      "\234\001\n#FinishApplicationMasterRequestProto\022" +
      "\023\n\013diagnostics\030\001 \001(\t\022\024\n\014tracking_url\030\002 \001" +
      "(\t\022J\n\030final_application_status\030\003 \001(\0162(.h" +
      "adoop.yarn.FinalApplicationStatusProto\"E" +
      "\n$FinishApplicationMasterResponseProto\022\035" +
      "\n\016isUnregistered\030\001 \001(\010:\005false\"\222\002\n\033Update" +
      "ContainerRequestProto\022\031\n\021container_versi" +
      "on\030\001 \002(\005\0223\n\014container_id\030\002 \002(\0132\035.hadoop." +
      "yarn.ContainerIdProto\022:\n\013update_type\030\003 \002" +
      "(\0162%.hadoop.yarn.ContainerUpdateTypeProt" +
      "o\022.\n\ncapability\030\004 \001(\0132\032.hadoop.yarn.Reso" +
      "urceProto\0227\n\016execution_type\030\005 \001(\0162\037.hado" +
      "op.yarn.ExecutionTypeProto\"\220\001\n\031UpdateCon" +
      "tainerErrorProto\022\016\n\006reason\030\001 \001(\t\022@\n\016upda" +
      "te_request\030\002 \001(\0132(.hadoop.yarn.UpdateCon" +
      "tainerRequestProto\022!\n\031current_container_" +
      "version\030\003 \001(\005\"\377\002\n\024AllocateRequestProto\022." +
      "\n\003ask\030\001 \003(\0132!.hadoop.yarn.ResourceReques" +
      "tProto\022.\n\007release\030\002 \003(\0132\035.hadoop.yarn.Co" +
      "ntainerIdProto\022E\n\021blacklist_request\030\003 \001(" +
      "\0132*.hadoop.yarn.ResourceBlacklistRequest" +
      "Proto\022\023\n\013response_id\030\004 \001(\005\022\020\n\010progress\030\005" +
      " \001(\002\022A\n\017update_requests\030\007 \003(\0132(.hadoop.y" +
      "arn.UpdateContainerRequestProto\022@\n\023sched" +
      "uling_requests\030\n \003(\0132#.hadoop.yarn.Sched" +
      "ulingRequestProto\022\024\n\014tracking_url\030\013 \001(\t\"" +
      "b\n\014NMTokenProto\022(\n\006nodeId\030\001 \001(\0132\030.hadoop" +
      ".yarn.NodeIdProto\022(\n\005token\030\002 \001(\0132\031.hadoo" +
      "p.common.TokenProto\"\203\001\n\025UpdatedContainer" +
      "Proto\022:\n\013update_type\030\001 \002(\0162%.hadoop.yarn" +
      ".ContainerUpdateTypeProto\022.\n\tcontainer\030\002" +
      " \002(\0132\033.hadoop.yarn.ContainerProto\"P\n\025Enh" +
      "ancedHeadroomProto\022\033\n\023total_pending_coun" +
      "t\030\001 \001(\005\022\032\n\022total_active_cores\030\002 \001(\005\"\275\007\n\025" +
      "AllocateResponseProto\0220\n\013a_m_command\030\001 \001" +
      "(\0162\033.hadoop.yarn.AMCommandProto\022\023\n\013respo" +
      "nse_id\030\002 \001(\005\0229\n\024allocated_containers\030\003 \003" +
      "(\0132\033.hadoop.yarn.ContainerProto\022G\n\034compl" +
      "eted_container_statuses\030\004 \003(\0132!.hadoop.y" +
      "arn.ContainerStatusProto\022)\n\005limit\030\005 \001(\0132" +
      "\032.hadoop.yarn.ResourceProto\0223\n\rupdated_n" +
      "odes\030\006 \003(\0132\034.hadoop.yarn.NodeReportProto" +
      "\022\031\n\021num_cluster_nodes\030\007 \001(\005\0224\n\007preempt\030\010" +
      " \001(\0132#.hadoop.yarn.PreemptionMessageProt" +
      "o\022,\n\tnm_tokens\030\t \003(\0132\031.hadoop.yarn.NMTok" +
      "enProto\022.\n\013am_rm_token\030\014 \001(\0132\031.hadoop.co" +
      "mmon.TokenProto\0228\n\024application_priority\030" +
      "\r \001(\0132\032.hadoop.yarn.PriorityProto\0227\n\016col" +
      "lector_info\030\016 \001(\0132\037.hadoop.yarn.Collecto" +
      "rInfoProto\022=\n\rupdate_errors\030\017 \003(\0132&.hado" +
      "op.yarn.UpdateContainerErrorProto\022>\n\022upd" +
      "ated_containers\030\020 \003(\0132\".hadoop.yarn.Upda" +
      "tedContainerProto\022F\n!containers_from_pre" +
      "vious_attempts\030\021 \003(\0132\033.hadoop.yarn.Conta" +
      "inerProto\022Q\n\034rejected_scheduling_request" +
      "s\030\022 \003(\0132+.hadoop.yarn.RejectedScheduling" +
      "RequestProto\022=\n\021enhanced_headroom\030\023 \001(\0132" +
      "\".hadoop.yarn.EnhancedHeadroomProto\"\037\n\035G" +
      "etNewApplicationRequestProto\"\220\001\n\036GetNewA" +
      "pplicationResponseProto\0227\n\016application_i" +
      "d\030\001 \001(\0132\037.hadoop.yarn.ApplicationIdProto" +
      "\0225\n\021maximumCapability\030\002 \001(\0132\032.hadoop.yar" +
      "n.ResourceProto\"[\n GetApplicationReportR" +
      "equestProto\0227\n\016application_id\030\001 \001(\0132\037.ha" +
      "doop.yarn.ApplicationIdProto\"d\n!GetAppli" +
      "cationReportResponseProto\022?\n\022application" +
      "_report\030\001 \001(\0132#.hadoop.yarn.ApplicationR" +
      "eportProto\"w\n\035SubmitApplicationRequestPr" +
      "oto\022V\n\036application_submission_context\030\001 " +
      "\001(\0132..hadoop.yarn.ApplicationSubmissionC" +
      "ontextProto\" \n\036SubmitApplicationResponse" +
      "Proto\"l\n\"FailApplicationAttemptRequestPr" +
      "oto\022F\n\026application_attempt_id\030\001 \001(\0132&.ha" +
      "doop.yarn.ApplicationAttemptIdProto\"%\n#F" +
      "ailApplicationAttemptResponseProto\"k\n\033Ki" +
      "llApplicationRequestProto\0227\n\016application" +
      "_id\030\001 \001(\0132\037.hadoop.yarn.ApplicationIdPro" +
      "to\022\023\n\013diagnostics\030\002 \001(\t\"@\n\034KillApplicati" +
      "onResponseProto\022 \n\021is_kill_completed\030\001 \001" +
      "(\010:\005false\"\037\n\035GetClusterMetricsRequestPro" +
      "to\"_\n\036GetClusterMetricsResponseProto\022=\n\017" +
      "cluster_metrics\030\001 \001(\0132$.hadoop.yarn.Yarn" +
      "ClusterMetricsProto\"x\n\'MoveApplicationAc" +
      "rossQueuesRequestProto\0227\n\016application_id" +
      "\030\001 \002(\0132\037.hadoop.yarn.ApplicationIdProto\022" +
      "\024\n\014target_queue\030\002 \002(\t\"*\n(MoveApplication" +
      "AcrossQueuesResponseProto\"\343\002\n\033GetApplica" +
      "tionsRequestProto\022\031\n\021application_types\030\001" +
      " \003(\t\022B\n\022application_states\030\002 \003(\0162&.hadoo" +
      "p.yarn.YarnApplicationStateProto\022\r\n\005user" +
      "s\030\003 \003(\t\022\016\n\006queues\030\004 \003(\t\022\r\n\005limit\030\005 \001(\003\022\023" +
      "\n\013start_begin\030\006 \001(\003\022\021\n\tstart_end\030\007 \001(\003\022\024" +
      "\n\014finish_begin\030\010 \001(\003\022\022\n\nfinish_end\030\t \001(\003" +
      "\022\027\n\017applicationTags\030\n \003(\t\022>\n\005scope\030\013 \001(\016" +
      "2*.hadoop.yarn.ApplicationsRequestScopeP" +
      "roto:\003ALL\022\014\n\004name\030\014 \001(\t\"Y\n\034GetApplicatio" +
      "nsResponseProto\0229\n\014applications\030\001 \003(\0132#." +
      "hadoop.yarn.ApplicationReportProto\"N\n\033Ge" +
      "tClusterNodesRequestProto\022/\n\nnodeStates\030" +
      "\001 \003(\0162\033.hadoop.yarn.NodeStateProto\"Q\n\034Ge" +
      "tClusterNodesResponseProto\0221\n\013nodeReport" +
      "s\030\001 \003(\0132\034.hadoop.yarn.NodeReportProto\"\217\001" +
      "\n\030GetQueueInfoRequestProto\022\021\n\tqueueName\030" +
      "\001 \001(\t\022\033\n\023includeApplications\030\002 \001(\010\022\032\n\022in" +
      "cludeChildQueues\030\003 \001(\010\022\021\n\trecursive\030\004 \001(" +
      "\010\022\024\n\014subClusterId\030\005 \001(\t\"K\n\031GetQueueInfoR" +
      "esponseProto\022.\n\tqueueInfo\030\001 \001(\0132\033.hadoop" +
      ".yarn.QueueInfoProto\"\"\n GetQueueUserAcls" +
      "InfoRequestProto\"^\n!GetQueueUserAclsInfo" +
      "ResponseProto\0229\n\rqueueUserAcls\030\001 \003(\0132\".h" +
      "adoop.yarn.QueueUserACLInfoProto\"\036\n\034GetN" +
      "odesToLabelsRequestProto\"W\n\035GetNodesToLa" +
      "belsResponseProto\0226\n\014nodeToLabels\030\001 \003(\0132" +
      " .hadoop.yarn.NodeIdToLabelsProto\"2\n\034Get" +
      "LabelsToNodesRequestProto\022\022\n\nnodeLabels\030" +
      "\001 \003(\t\"Y\n\035GetLabelsToNodesResponseProto\0228" +
      "\n\rlabelsToNodes\030\001 \003(\0132!.hadoop.yarn.Labe" +
      "lsToNodeIdsProto\"\"\n GetClusterNodeLabels" +
      "RequestProto\"r\n!GetClusterNodeLabelsResp" +
      "onseProto\022\034\n\024deprecatedNodeLabels\030\001 \003(\t\022" +
      "/\n\nnodeLabels\030\002 \003(\0132\033.hadoop.yarn.NodeLa" +
      "belProto\"&\n$GetClusterNodeAttributesRequ" +
      "estProto\"d\n%GetClusterNodeAttributesResp" +
      "onseProto\022;\n\016nodeAttributes\030\001 \003(\0132#.hado" +
      "op.yarn.NodeAttributeInfoProto\"^\n GetAtt" +
      "ributesToNodesRequestProto\022:\n\016nodeAttrib" +
      "utes\030\001 \003(\0132\".hadoop.yarn.NodeAttributeKe" +
      "yProto\"b\n!GetAttributesToNodesResponsePr" +
      "oto\022=\n\021attributesToNodes\030\001 \003(\0132\".hadoop." +
      "yarn.AttributeToNodesProto\"5\n GetNodesTo" +
      "AttributesRequestProto\022\021\n\thostnames\030\001 \003(" +
      "\t\"b\n!GetNodesToAttributesResponseProto\022=" +
      "\n\021nodesToAttributes\030\001 \003(\0132\".hadoop.yarn." +
      "NodeToAttributesProto\"\230\001\n%UpdateApplicat" +
      "ionPriorityRequestProto\0226\n\rapplicationId" +
      "\030\001 \002(\0132\037.hadoop.yarn.ApplicationIdProto\022" +
      "7\n\023applicationPriority\030\002 \002(\0132\032.hadoop.ya" +
      "rn.PriorityProto\"a\n&UpdateApplicationPri" +
      "orityResponseProto\0227\n\023applicationPriorit" +
      "y\030\001 \001(\0132\032.hadoop.yarn.PriorityProto\"\215\001\n\033" +
      "SignalContainerRequestProto\0223\n\014container" +
      "_id\030\001 \002(\0132\035.hadoop.yarn.ContainerIdProto" +
      "\0229\n\007command\030\002 \002(\0162(.hadoop.yarn.SignalCo" +
      "ntainerCommandProto\"\036\n\034SignalContainerRe" +
      "sponseProto\"\254\001\n%UpdateApplicationTimeout" +
      "sRequestProto\0226\n\rapplicationId\030\001 \002(\0132\037.h" +
      "adoop.yarn.ApplicationIdProto\022K\n\024applica" +
      "tion_timeouts\030\002 \003(\0132-.hadoop.yarn.Applic" +
      "ationUpdateTimeoutMapProto\"u\n&UpdateAppl" +
      "icationTimeoutsResponseProto\022K\n\024applicat" +
      "ion_timeouts\030\001 \003(\0132-.hadoop.yarn.Applica" +
      "tionUpdateTimeoutMapProto\"$\n\"GetAllResou" +
      "rceProfilesRequestProto\"d\n#GetAllResourc" +
      "eProfilesResponseProto\022=\n\021resource_profi" +
      "les\030\001 \002(\0132\".hadoop.yarn.ResourceProfiles" +
      "Proto\"1\n\036GetResourceProfileRequestProto\022" +
      "\017\n\007profile\030\001 \002(\t\"P\n\037GetResourceProfileRe" +
      "sponseProto\022-\n\tresources\030\001 \002(\0132\032.hadoop." +
      "yarn.ResourceProto\"$\n\"GetAllResourceType" +
      "InfoRequestProto\"e\n#GetAllResourceTypeIn" +
      "foResponseProto\022>\n\022resource_type_info\030\001 " +
      "\003(\0132\".hadoop.yarn.ResourceTypeInfoProto\"" +
      "\234\001\n\032StartContainerRequestProto\022J\n\030contai" +
      "ner_launch_context\030\001 \001(\0132(.hadoop.yarn.C" +
      "ontainerLaunchContextProto\0222\n\017container_" +
      "token\030\002 \001(\0132\031.hadoop.common.TokenProto\"[" +
      "\n\033StartContainerResponseProto\022<\n\022service" +
      "s_meta_data\030\001 \003(\0132 .hadoop.yarn.StringBy" +
      "tesMapProto\"P\n\031StopContainerRequestProto" +
      "\0223\n\014container_id\030\001 \001(\0132\035.hadoop.yarn.Con" +
      "tainerIdProto\"\034\n\032StopContainerResponsePr" +
      "oto\"\232\001\n ResourceLocalizationRequestProto" +
      "\0223\n\014container_id\030\001 \001(\0132\035.hadoop.yarn.Con" +
      "tainerIdProto\022A\n\017local_resources\030\002 \003(\0132(" +
      ".hadoop.yarn.StringLocalResourceMapProto" +
      "\"#\n!ResourceLocalizationResponseProto\"\277\001" +
      "\n!ReInitializeContainerRequestProto\0223\n\014c" +
      "ontainer_id\030\001 \001(\0132\035.hadoop.yarn.Containe" +
      "rIdProto\022J\n\030container_launch_context\030\002 \001" +
      "(\0132(.hadoop.yarn.ContainerLaunchContextP" +
      "roto\022\031\n\013auto_commit\030\003 \001(\010:\004true\"$\n\"ReIni" +
      "tializeContainerResponseProto\"\037\n\035Restart" +
      "ContainerResponseProto\"\027\n\025RollbackRespon" +
      "seProto\"\025\n\023CommitResponseProto\"g\n\033StartC" +
      "ontainersRequestProto\022H\n\027start_container" +
      "_request\030\001 \003(\0132\'.hadoop.yarn.StartContai" +
      "nerRequestProto\"\213\001\n\032ContainerExceptionMa" +
      "pProto\0223\n\014container_id\030\001 \001(\0132\035.hadoop.ya" +
      "rn.ContainerIdProto\0228\n\texception\030\002 \001(\0132%" +
      ".hadoop.yarn.SerializedExceptionProto\"\331\001" +
      "\n\034StartContainersResponseProto\022<\n\022servic" +
      "es_meta_data\030\001 \003(\0132 .hadoop.yarn.StringB" +
      "ytesMapProto\0229\n\022succeeded_requests\030\002 \003(\013" +
      "2\035.hadoop.yarn.ContainerIdProto\022@\n\017faile" +
      "d_requests\030\003 \003(\0132\'.hadoop.yarn.Container" +
      "ExceptionMapProto\"Q\n\032StopContainersReque" +
      "stProto\0223\n\014container_id\030\001 \003(\0132\035.hadoop.y" +
      "arn.ContainerIdProto\"\232\001\n\033StopContainersR" +
      "esponseProto\0229\n\022succeeded_requests\030\001 \003(\013" +
      "2\035.hadoop.yarn.ContainerIdProto\022@\n\017faile" +
      "d_requests\030\002 \003(\0132\'.hadoop.yarn.Container" +
      "ExceptionMapProto\"W\n GetContainerStatuse" +
      "sRequestProto\0223\n\014container_id\030\001 \003(\0132\035.ha" +
      "doop.yarn.ContainerIdProto\"\230\001\n!GetContai" +
      "nerStatusesResponseProto\0221\n\006status\030\001 \003(\013" +
      "2!.hadoop.yarn.ContainerStatusProto\022@\n\017f" +
      "ailed_requests\030\002 \003(\0132\'.hadoop.yarn.Conta" +
      "inerExceptionMapProto\"`\n&IncreaseContain" +
      "ersResourceRequestProto\0226\n\023increase_cont" +
      "ainers\030\001 \003(\0132\031.hadoop.common.TokenProto\"" +
      "\246\001\n\'IncreaseContainersResourceResponsePr" +
      "oto\0229\n\022succeeded_requests\030\001 \003(\0132\035.hadoop" +
      ".yarn.ContainerIdProto\022@\n\017failed_request" +
      "s\030\002 \003(\0132\'.hadoop.yarn.ContainerException" +
      "MapProto\"X\n\033ContainerUpdateRequestProto\022" +
      "9\n\026update_container_token\030\001 \003(\0132\031.hadoop" +
      ".common.TokenProto\"\233\001\n\034ContainerUpdateRe" +
      "sponseProto\0229\n\022succeeded_requests\030\001 \003(\0132" +
      "\035.hadoop.yarn.ContainerIdProto\022@\n\017failed" +
      "_requests\030\002 \003(\0132\'.hadoop.yarn.ContainerE" +
      "xceptionMapProto\"q\n\'GetApplicationAttemp" +
      "tReportRequestProto\022F\n\026application_attem" +
      "pt_id\030\001 \001(\0132&.hadoop.yarn.ApplicationAtt" +
      "emptIdProto\"z\n(GetApplicationAttemptRepo" +
      "rtResponseProto\022N\n\032application_attempt_r" +
      "eport\030\001 \001(\0132*.hadoop.yarn.ApplicationAtt" +
      "emptReportProto\"]\n\"GetApplicationAttempt" +
      "sRequestProto\0227\n\016application_id\030\001 \001(\0132\037." +
      "hadoop.yarn.ApplicationIdProto\"o\n#GetApp" +
      "licationAttemptsResponseProto\022H\n\024applica" +
      "tion_attempts\030\001 \003(\0132*.hadoop.yarn.Applic" +
      "ationAttemptReportProto\"U\n\036GetContainerR" +
      "eportRequestProto\0223\n\014container_id\030\001 \001(\0132" +
      "\035.hadoop.yarn.ContainerIdProto\"^\n\037GetCon" +
      "tainerReportResponseProto\022;\n\020container_r" +
      "eport\030\001 \001(\0132!.hadoop.yarn.ContainerRepor" +
      "tProto\"c\n\031GetContainersRequestProto\022F\n\026a" +
      "pplication_attempt_id\030\001 \001(\0132&.hadoop.yar" +
      "n.ApplicationAttemptIdProto\"S\n\032GetContai" +
      "nersResponseProto\0225\n\ncontainers\030\001 \003(\0132!." +
      "hadoop.yarn.ContainerReportProto\"q\n\"UseS" +
      "haredCacheResourceRequestProto\0226\n\rapplic" +
      "ationId\030\001 \001(\0132\037.hadoop.yarn.ApplicationI" +
      "dProto\022\023\n\013resourceKey\030\002 \001(\t\"3\n#UseShared" +
      "CacheResourceResponseProto\022\014\n\004path\030\001 \001(\t" +
      "\"u\n&ReleaseSharedCacheResourceRequestPro" +
      "to\0226\n\rapplicationId\030\001 \001(\0132\037.hadoop.yarn." +
      "ApplicationIdProto\022\023\n\013resourceKey\030\002 \001(\t\"" +
      ")\n\'ReleaseSharedCacheResourceResponsePro" +
      "to\"\037\n\035GetNewReservationRequestProto\"Y\n\036G" +
      "etNewReservationResponseProto\0227\n\016reserva" +
      "tion_id\030\001 \001(\0132\037.hadoop.yarn.ReservationI" +
      "dProto\"\264\001\n!ReservationSubmissionRequestP" +
      "roto\022\r\n\005queue\030\001 \001(\t\022G\n\026reservation_defin" +
      "ition\030\002 \001(\0132\'.hadoop.yarn.ReservationDef" +
      "initionProto\0227\n\016reservation_id\030\003 \001(\0132\037.h" +
      "adoop.yarn.ReservationIdProto\"$\n\"Reserva" +
      "tionSubmissionResponseProto\"\241\001\n\035Reservat" +
      "ionUpdateRequestProto\022G\n\026reservation_def" +
      "inition\030\001 \001(\0132\'.hadoop.yarn.ReservationD" +
      "efinitionProto\0227\n\016reservation_id\030\002 \001(\0132\037" +
      ".hadoop.yarn.ReservationIdProto\" \n\036Reser" +
      "vationUpdateResponseProto\"X\n\035Reservation" +
      "DeleteRequestProto\0227\n\016reservation_id\030\001 \001" +
      "(\0132\037.hadoop.yarn.ReservationIdProto\" \n\036R" +
      "eservationDeleteResponseProto\"\220\001\n\033Reserv" +
      "ationListRequestProto\022\r\n\005queue\030\001 \001(\t\022\026\n\016" +
      "reservation_id\030\003 \001(\t\022\022\n\nstart_time\030\004 \001(\003" +
      "\022\020\n\010end_time\030\005 \001(\003\022$\n\034include_resource_a" +
      "llocations\030\006 \001(\010\"b\n\034ReservationListRespo" +
      "nseProto\022B\n\014reservations\030\001 \003(\0132,.hadoop." +
      "yarn.ReservationAllocationStateProto\"\'\n%" +
      "RunSharedCacheCleanerTaskRequestProto\":\n" +
      "&RunSharedCacheCleanerTaskResponseProto\022" +
      "\020\n\010accepted\030\001 \001(\010\"Z\n#GetLocalizationStat" +
      "usesRequestProto\0223\n\014container_id\030\001 \003(\0132\035" +
      ".hadoop.yarn.ContainerIdProto\"\275\001\n$GetLoc" +
      "alizationStatusesResponseProto\022S\n\032cntn_l" +
      "ocalization_statuses\030\001 \003(\0132/.hadoop.yarn" +
      ".ContainerLocalizationStatusesProto\022@\n\017f" +
      "ailed_requests\030\002 \003(\0132\'.hadoop.yarn.Conta" +
      "inerExceptionMapProto\"\205\001\n\027LocalizationSt" +
      "atusProto\022\024\n\014resource_key\030\001 \001(\t\022?\n\022local" +
      "ization_state\030\002 \001(\0162#.hadoop.yarn.Locali" +
      "zationStateProto\022\023\n\013diagnostics\030\003 \001(\t\"\236\001" +
      "\n\"ContainerLocalizationStatusesProto\0223\n\014" +
      "container_id\030\001 \001(\0132\035.hadoop.yarn.Contain" +
      "erIdProto\022C\n\025localization_statuses\030\002 \003(\013" +
      "2$.hadoop.yarn.LocalizationStatusProto*\177" +
      "\n\030ContainerUpdateTypeProto\022\025\n\021INCREASE_R" +
      "ESOURCE\020\000\022\025\n\021DECREASE_RESOURCE\020\001\022\032\n\026PROM" +
      "OTE_EXECUTION_TYPE\020\002\022\031\n\025DEMOTE_EXECUTION" +
      "_TYPE\020\003*-\n\026SchedulerResourceTypes\022\n\n\006MEM" +
      "ORY\020\000\022\007\n\003CPU\020\001*?\n\035ApplicationsRequestSco" +
      "peProto\022\007\n\003ALL\020\000\022\014\n\010VIEWABLE\020\001\022\007\n\003OWN\020\002*" +
      "F\n\026LocalizationStateProto\022\r\n\tL_PENDING\020\001" +
      "\022\017\n\013L_COMPLETED\020\002\022\014\n\010L_FAILED\020\003B7\n\034org.a" +
      "pache.hadoop.yarn.protoB\021YarnServiceProt" +
      "os\210\001\001\240\001\001"
    };
    descriptor = org.apache.hadoop.thirdparty.protobuf.Descriptors.FileDescriptor
      .internalBuildGeneratedFileFrom(descriptorData,
        new org.apache.hadoop.thirdparty.protobuf.Descriptors.FileDescriptor[] {
          org.apache.hadoop.security.proto.SecurityProtos.getDescriptor(),
          org.apache.hadoop.yarn.proto.YarnProtos.getDescriptor(),
        });
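    // Each message type is looked up by its zero-based index, which matches
    // its order of declaration in yarn_service_protos.proto, and paired with
    // a FieldAccessorTable whose String[] lists the proto field names in
    // CamelCase form (e.g. "rpc_port" becomes "RpcPort"); the table derives
    // the getFoo()/setFoo() accessor names of the generated classes from them.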
    internal_static_hadoop_yarn_RegisterApplicationMasterRequestProto_descriptor =
      getDescriptor().getMessageTypes().get(0);
    internal_static_hadoop_yarn_RegisterApplicationMasterRequestProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_RegisterApplicationMasterRequestProto_descriptor,
        new java.lang.String[] { "Host", "RpcPort", "TrackingUrl", "PlacementConstraints", });
    internal_static_hadoop_yarn_RegisterApplicationMasterResponseProto_descriptor =
      getDescriptor().getMessageTypes().get(1);
    internal_static_hadoop_yarn_RegisterApplicationMasterResponseProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_RegisterApplicationMasterResponseProto_descriptor,
        new java.lang.String[] { "MaximumCapability", "ClientToAmTokenMasterKey", "ApplicationACLs", "ContainersFromPreviousAttempts", "Queue", "NmTokensFromPreviousAttempts", "SchedulerResourceTypes", "ResourceProfiles", "ResourceTypes", });
    internal_static_hadoop_yarn_FinishApplicationMasterRequestProto_descriptor =
      getDescriptor().getMessageTypes().get(2);
    internal_static_hadoop_yarn_FinishApplicationMasterRequestProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_FinishApplicationMasterRequestProto_descriptor,
        new java.lang.String[] { "Diagnostics", "TrackingUrl", "FinalApplicationStatus", });
    internal_static_hadoop_yarn_FinishApplicationMasterResponseProto_descriptor =
      getDescriptor().getMessageTypes().get(3);
    internal_static_hadoop_yarn_FinishApplicationMasterResponseProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_FinishApplicationMasterResponseProto_descriptor,
        new java.lang.String[] { "IsUnregistered", });
    internal_static_hadoop_yarn_UpdateContainerRequestProto_descriptor =
      getDescriptor().getMessageTypes().get(4);
    internal_static_hadoop_yarn_UpdateContainerRequestProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_UpdateContainerRequestProto_descriptor,
        new java.lang.String[] { "ContainerVersion", "ContainerId", "UpdateType", "Capability", "ExecutionType", });
    internal_static_hadoop_yarn_UpdateContainerErrorProto_descriptor =
      getDescriptor().getMessageTypes().get(5);
    internal_static_hadoop_yarn_UpdateContainerErrorProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_UpdateContainerErrorProto_descriptor,
        new java.lang.String[] { "Reason", "UpdateRequest", "CurrentContainerVersion", });
    internal_static_hadoop_yarn_AllocateRequestProto_descriptor =
      getDescriptor().getMessageTypes().get(6);
    internal_static_hadoop_yarn_AllocateRequestProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_AllocateRequestProto_descriptor,
        new java.lang.String[] { "Ask", "Release", "BlacklistRequest", "ResponseId", "Progress", "UpdateRequests", "SchedulingRequests", "TrackingUrl", });
    internal_static_hadoop_yarn_NMTokenProto_descriptor =
      getDescriptor().getMessageTypes().get(7);
    internal_static_hadoop_yarn_NMTokenProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_NMTokenProto_descriptor,
        new java.lang.String[] { "NodeId", "Token", });
    internal_static_hadoop_yarn_UpdatedContainerProto_descriptor =
      getDescriptor().getMessageTypes().get(8);
    internal_static_hadoop_yarn_UpdatedContainerProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_UpdatedContainerProto_descriptor,
        new java.lang.String[] { "UpdateType", "Container", });
    internal_static_hadoop_yarn_EnhancedHeadroomProto_descriptor =
      getDescriptor().getMessageTypes().get(9);
    internal_static_hadoop_yarn_EnhancedHeadroomProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_EnhancedHeadroomProto_descriptor,
        new java.lang.String[] { "TotalPendingCount", "TotalActiveCores", });
    internal_static_hadoop_yarn_AllocateResponseProto_descriptor =
      getDescriptor().getMessageTypes().get(10);
    internal_static_hadoop_yarn_AllocateResponseProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_AllocateResponseProto_descriptor,
        new java.lang.String[] { "AMCommand", "ResponseId", "AllocatedContainers", "CompletedContainerStatuses", "Limit", "UpdatedNodes", "NumClusterNodes", "Preempt", "NmTokens", "AmRmToken", "ApplicationPriority", "CollectorInfo", "UpdateErrors", "UpdatedContainers", "ContainersFromPreviousAttempts", "RejectedSchedulingRequests", "EnhancedHeadroom", });
    internal_static_hadoop_yarn_GetNewApplicationRequestProto_descriptor =
      getDescriptor().getMessageTypes().get(11);
    internal_static_hadoop_yarn_GetNewApplicationRequestProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_GetNewApplicationRequestProto_descriptor,
        new java.lang.String[] { });
    internal_static_hadoop_yarn_GetNewApplicationResponseProto_descriptor =
      getDescriptor().getMessageTypes().get(12);
    internal_static_hadoop_yarn_GetNewApplicationResponseProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_GetNewApplicationResponseProto_descriptor,
        new java.lang.String[] { "ApplicationId", "MaximumCapability", });
    internal_static_hadoop_yarn_GetApplicationReportRequestProto_descriptor =
      getDescriptor().getMessageTypes().get(13);
    internal_static_hadoop_yarn_GetApplicationReportRequestProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_GetApplicationReportRequestProto_descriptor,
        new java.lang.String[] { "ApplicationId", });
    internal_static_hadoop_yarn_GetApplicationReportResponseProto_descriptor =
      getDescriptor().getMessageTypes().get(14);
    internal_static_hadoop_yarn_GetApplicationReportResponseProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_GetApplicationReportResponseProto_descriptor,
        new java.lang.String[] { "ApplicationReport", });
    internal_static_hadoop_yarn_SubmitApplicationRequestProto_descriptor =
      getDescriptor().getMessageTypes().get(15);
    internal_static_hadoop_yarn_SubmitApplicationRequestProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_SubmitApplicationRequestProto_descriptor,
        new java.lang.String[] { "ApplicationSubmissionContext", });
    internal_static_hadoop_yarn_SubmitApplicationResponseProto_descriptor =
      getDescriptor().getMessageTypes().get(16);
    internal_static_hadoop_yarn_SubmitApplicationResponseProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_SubmitApplicationResponseProto_descriptor,
        new java.lang.String[] { });
    internal_static_hadoop_yarn_FailApplicationAttemptRequestProto_descriptor =
      getDescriptor().getMessageTypes().get(17);
    internal_static_hadoop_yarn_FailApplicationAttemptRequestProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_FailApplicationAttemptRequestProto_descriptor,
        new java.lang.String[] { "ApplicationAttemptId", });
    internal_static_hadoop_yarn_FailApplicationAttemptResponseProto_descriptor =
      getDescriptor().getMessageTypes().get(18);
    internal_static_hadoop_yarn_FailApplicationAttemptResponseProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_FailApplicationAttemptResponseProto_descriptor,
        new java.lang.String[] { });
    internal_static_hadoop_yarn_KillApplicationRequestProto_descriptor =
      getDescriptor().getMessageTypes().get(19);
    internal_static_hadoop_yarn_KillApplicationRequestProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_KillApplicationRequestProto_descriptor,
        new java.lang.String[] { "ApplicationId", "Diagnostics", });
    internal_static_hadoop_yarn_KillApplicationResponseProto_descriptor =
      getDescriptor().getMessageTypes().get(20);
    internal_static_hadoop_yarn_KillApplicationResponseProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_KillApplicationResponseProto_descriptor,
        new java.lang.String[] { "IsKillCompleted", });
    internal_static_hadoop_yarn_GetClusterMetricsRequestProto_descriptor =
      getDescriptor().getMessageTypes().get(21);
    internal_static_hadoop_yarn_GetClusterMetricsRequestProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_GetClusterMetricsRequestProto_descriptor,
        new java.lang.String[] { });
    internal_static_hadoop_yarn_GetClusterMetricsResponseProto_descriptor =
      getDescriptor().getMessageTypes().get(22);
    internal_static_hadoop_yarn_GetClusterMetricsResponseProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_GetClusterMetricsResponseProto_descriptor,
        new java.lang.String[] { "ClusterMetrics", });
    internal_static_hadoop_yarn_MoveApplicationAcrossQueuesRequestProto_descriptor =
      getDescriptor().getMessageTypes().get(23);
    internal_static_hadoop_yarn_MoveApplicationAcrossQueuesRequestProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_MoveApplicationAcrossQueuesRequestProto_descriptor,
        new java.lang.String[] { "ApplicationId", "TargetQueue", });
    internal_static_hadoop_yarn_MoveApplicationAcrossQueuesResponseProto_descriptor =
      getDescriptor().getMessageTypes().get(24);
    internal_static_hadoop_yarn_MoveApplicationAcrossQueuesResponseProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_MoveApplicationAcrossQueuesResponseProto_descriptor,
        new java.lang.String[] { });
    internal_static_hadoop_yarn_GetApplicationsRequestProto_descriptor =
      getDescriptor().getMessageTypes().get(25);
    internal_static_hadoop_yarn_GetApplicationsRequestProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_GetApplicationsRequestProto_descriptor,
        new java.lang.String[] { "ApplicationTypes", "ApplicationStates", "Users", "Queues", "Limit", "StartBegin", "StartEnd", "FinishBegin", "FinishEnd", "ApplicationTags", "Scope", "Name", });
    internal_static_hadoop_yarn_GetApplicationsResponseProto_descriptor =
      getDescriptor().getMessageTypes().get(26);
    internal_static_hadoop_yarn_GetApplicationsResponseProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_GetApplicationsResponseProto_descriptor,
        new java.lang.String[] { "Applications", });
    internal_static_hadoop_yarn_GetClusterNodesRequestProto_descriptor =
      getDescriptor().getMessageTypes().get(27);
    internal_static_hadoop_yarn_GetClusterNodesRequestProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_GetClusterNodesRequestProto_descriptor,
        new java.lang.String[] { "NodeStates", });
    internal_static_hadoop_yarn_GetClusterNodesResponseProto_descriptor =
      getDescriptor().getMessageTypes().get(28);
    internal_static_hadoop_yarn_GetClusterNodesResponseProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_GetClusterNodesResponseProto_descriptor,
        new java.lang.String[] { "NodeReports", });
    internal_static_hadoop_yarn_GetQueueInfoRequestProto_descriptor =
      getDescriptor().getMessageTypes().get(29);
    internal_static_hadoop_yarn_GetQueueInfoRequestProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_GetQueueInfoRequestProto_descriptor,
        new java.lang.String[] { "QueueName", "IncludeApplications", "IncludeChildQueues", "Recursive", "SubClusterId", });
    internal_static_hadoop_yarn_GetQueueInfoResponseProto_descriptor =
      getDescriptor().getMessageTypes().get(30);
    internal_static_hadoop_yarn_GetQueueInfoResponseProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_GetQueueInfoResponseProto_descriptor,
        new java.lang.String[] { "QueueInfo", });
    internal_static_hadoop_yarn_GetQueueUserAclsInfoRequestProto_descriptor =
      getDescriptor().getMessageTypes().get(31);
    internal_static_hadoop_yarn_GetQueueUserAclsInfoRequestProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_GetQueueUserAclsInfoRequestProto_descriptor,
        new java.lang.String[] { });
    internal_static_hadoop_yarn_GetQueueUserAclsInfoResponseProto_descriptor =
      getDescriptor().getMessageTypes().get(32);
    internal_static_hadoop_yarn_GetQueueUserAclsInfoResponseProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_GetQueueUserAclsInfoResponseProto_descriptor,
        new java.lang.String[] { "QueueUserAcls", });
    internal_static_hadoop_yarn_GetNodesToLabelsRequestProto_descriptor =
      getDescriptor().getMessageTypes().get(33);
    internal_static_hadoop_yarn_GetNodesToLabelsRequestProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_GetNodesToLabelsRequestProto_descriptor,
        new java.lang.String[] { });
    internal_static_hadoop_yarn_GetNodesToLabelsResponseProto_descriptor =
      getDescriptor().getMessageTypes().get(34);
    internal_static_hadoop_yarn_GetNodesToLabelsResponseProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_GetNodesToLabelsResponseProto_descriptor,
        new java.lang.String[] { "NodeToLabels", });
    internal_static_hadoop_yarn_GetLabelsToNodesRequestProto_descriptor =
      getDescriptor().getMessageTypes().get(35);
    internal_static_hadoop_yarn_GetLabelsToNodesRequestProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_GetLabelsToNodesRequestProto_descriptor,
        new java.lang.String[] { "NodeLabels", });
    internal_static_hadoop_yarn_GetLabelsToNodesResponseProto_descriptor =
      getDescriptor().getMessageTypes().get(36);
    internal_static_hadoop_yarn_GetLabelsToNodesResponseProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_GetLabelsToNodesResponseProto_descriptor,
        new java.lang.String[] { "LabelsToNodes", });
    internal_static_hadoop_yarn_GetClusterNodeLabelsRequestProto_descriptor =
      getDescriptor().getMessageTypes().get(37);
    internal_static_hadoop_yarn_GetClusterNodeLabelsRequestProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_GetClusterNodeLabelsRequestProto_descriptor,
        new java.lang.String[] { });
    internal_static_hadoop_yarn_GetClusterNodeLabelsResponseProto_descriptor =
      getDescriptor().getMessageTypes().get(38);
    internal_static_hadoop_yarn_GetClusterNodeLabelsResponseProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_GetClusterNodeLabelsResponseProto_descriptor,
        new java.lang.String[] { "DeprecatedNodeLabels", "NodeLabels", });
    internal_static_hadoop_yarn_GetClusterNodeAttributesRequestProto_descriptor =
      getDescriptor().getMessageTypes().get(39);
    internal_static_hadoop_yarn_GetClusterNodeAttributesRequestProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_GetClusterNodeAttributesRequestProto_descriptor,
        new java.lang.String[] { });
    internal_static_hadoop_yarn_GetClusterNodeAttributesResponseProto_descriptor =
      getDescriptor().getMessageTypes().get(40);
    internal_static_hadoop_yarn_GetClusterNodeAttributesResponseProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_GetClusterNodeAttributesResponseProto_descriptor,
        new java.lang.String[] { "NodeAttributes", });
    internal_static_hadoop_yarn_GetAttributesToNodesRequestProto_descriptor =
      getDescriptor().getMessageTypes().get(41);
    internal_static_hadoop_yarn_GetAttributesToNodesRequestProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_GetAttributesToNodesRequestProto_descriptor,
        new java.lang.String[] { "NodeAttributes", });
    internal_static_hadoop_yarn_GetAttributesToNodesResponseProto_descriptor =
      getDescriptor().getMessageTypes().get(42);
    internal_static_hadoop_yarn_GetAttributesToNodesResponseProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_GetAttributesToNodesResponseProto_descriptor,
        new java.lang.String[] { "AttributesToNodes", });
    internal_static_hadoop_yarn_GetNodesToAttributesRequestProto_descriptor =
      getDescriptor().getMessageTypes().get(43);
    internal_static_hadoop_yarn_GetNodesToAttributesRequestProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_GetNodesToAttributesRequestProto_descriptor,
        new java.lang.String[] { "Hostnames", });
    internal_static_hadoop_yarn_GetNodesToAttributesResponseProto_descriptor =
      getDescriptor().getMessageTypes().get(44);
    internal_static_hadoop_yarn_GetNodesToAttributesResponseProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_GetNodesToAttributesResponseProto_descriptor,
        new java.lang.String[] { "NodesToAttributes", });
    internal_static_hadoop_yarn_UpdateApplicationPriorityRequestProto_descriptor =
      getDescriptor().getMessageTypes().get(45);
    internal_static_hadoop_yarn_UpdateApplicationPriorityRequestProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_UpdateApplicationPriorityRequestProto_descriptor,
        new java.lang.String[] { "ApplicationId", "ApplicationPriority", });
    internal_static_hadoop_yarn_UpdateApplicationPriorityResponseProto_descriptor =
      getDescriptor().getMessageTypes().get(46);
    internal_static_hadoop_yarn_UpdateApplicationPriorityResponseProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_UpdateApplicationPriorityResponseProto_descriptor,
        new java.lang.String[] { "ApplicationPriority", });
    internal_static_hadoop_yarn_SignalContainerRequestProto_descriptor =
      getDescriptor().getMessageTypes().get(47);
    internal_static_hadoop_yarn_SignalContainerRequestProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_SignalContainerRequestProto_descriptor,
        new java.lang.String[] { "ContainerId", "Command", });
    internal_static_hadoop_yarn_SignalContainerResponseProto_descriptor =
      getDescriptor().getMessageTypes().get(48);
    internal_static_hadoop_yarn_SignalContainerResponseProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_SignalContainerResponseProto_descriptor,
        new java.lang.String[] { });
    internal_static_hadoop_yarn_UpdateApplicationTimeoutsRequestProto_descriptor =
      getDescriptor().getMessageTypes().get(49);
    internal_static_hadoop_yarn_UpdateApplicationTimeoutsRequestProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_UpdateApplicationTimeoutsRequestProto_descriptor,
        new java.lang.String[] { "ApplicationId", "ApplicationTimeouts", });
    internal_static_hadoop_yarn_UpdateApplicationTimeoutsResponseProto_descriptor =
      getDescriptor().getMessageTypes().get(50);
    internal_static_hadoop_yarn_UpdateApplicationTimeoutsResponseProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_UpdateApplicationTimeoutsResponseProto_descriptor,
        new java.lang.String[] { "ApplicationTimeouts", });
    internal_static_hadoop_yarn_GetAllResourceProfilesRequestProto_descriptor =
      getDescriptor().getMessageTypes().get(51);
    internal_static_hadoop_yarn_GetAllResourceProfilesRequestProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_GetAllResourceProfilesRequestProto_descriptor,
        new java.lang.String[] { });
    internal_static_hadoop_yarn_GetAllResourceProfilesResponseProto_descriptor =
      getDescriptor().getMessageTypes().get(52);
    internal_static_hadoop_yarn_GetAllResourceProfilesResponseProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_GetAllResourceProfilesResponseProto_descriptor,
        new java.lang.String[] { "ResourceProfiles", });
    internal_static_hadoop_yarn_GetResourceProfileRequestProto_descriptor =
      getDescriptor().getMessageTypes().get(53);
    internal_static_hadoop_yarn_GetResourceProfileRequestProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_GetResourceProfileRequestProto_descriptor,
        new java.lang.String[] { "Profile", });
    internal_static_hadoop_yarn_GetResourceProfileResponseProto_descriptor =
      getDescriptor().getMessageTypes().get(54);
    internal_static_hadoop_yarn_GetResourceProfileResponseProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_GetResourceProfileResponseProto_descriptor,
        new java.lang.String[] { "Resources", });
    internal_static_hadoop_yarn_GetAllResourceTypeInfoRequestProto_descriptor =
      getDescriptor().getMessageTypes().get(55);
    internal_static_hadoop_yarn_GetAllResourceTypeInfoRequestProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_GetAllResourceTypeInfoRequestProto_descriptor,
        new java.lang.String[] { });
    internal_static_hadoop_yarn_GetAllResourceTypeInfoResponseProto_descriptor =
      getDescriptor().getMessageTypes().get(56);
    internal_static_hadoop_yarn_GetAllResourceTypeInfoResponseProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_GetAllResourceTypeInfoResponseProto_descriptor,
        new java.lang.String[] { "ResourceTypeInfo", });
    internal_static_hadoop_yarn_StartContainerRequestProto_descriptor =
      getDescriptor().getMessageTypes().get(57);
    internal_static_hadoop_yarn_StartContainerRequestProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_StartContainerRequestProto_descriptor,
        new java.lang.String[] { "ContainerLaunchContext", "ContainerToken", });
    internal_static_hadoop_yarn_StartContainerResponseProto_descriptor =
      getDescriptor().getMessageTypes().get(58);
    internal_static_hadoop_yarn_StartContainerResponseProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_StartContainerResponseProto_descriptor,
        new java.lang.String[] { "ServicesMetaData", });
    internal_static_hadoop_yarn_StopContainerRequestProto_descriptor =
      getDescriptor().getMessageTypes().get(59);
    internal_static_hadoop_yarn_StopContainerRequestProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_StopContainerRequestProto_descriptor,
        new java.lang.String[] { "ContainerId", });
    internal_static_hadoop_yarn_StopContainerResponseProto_descriptor =
      getDescriptor().getMessageTypes().get(60);
    internal_static_hadoop_yarn_StopContainerResponseProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_StopContainerResponseProto_descriptor,
        new java.lang.String[] { });
    internal_static_hadoop_yarn_ResourceLocalizationRequestProto_descriptor =
      getDescriptor().getMessageTypes().get(61);
    internal_static_hadoop_yarn_ResourceLocalizationRequestProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_ResourceLocalizationRequestProto_descriptor,
        new java.lang.String[] { "ContainerId", "LocalResources", });
    internal_static_hadoop_yarn_ResourceLocalizationResponseProto_descriptor =
      getDescriptor().getMessageTypes().get(62);
    internal_static_hadoop_yarn_ResourceLocalizationResponseProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_ResourceLocalizationResponseProto_descriptor,
        new java.lang.String[] { });
    internal_static_hadoop_yarn_ReInitializeContainerRequestProto_descriptor =
      getDescriptor().getMessageTypes().get(63);
    internal_static_hadoop_yarn_ReInitializeContainerRequestProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_ReInitializeContainerRequestProto_descriptor,
        new java.lang.String[] { "ContainerId", "ContainerLaunchContext", "AutoCommit", });
    internal_static_hadoop_yarn_ReInitializeContainerResponseProto_descriptor =
      getDescriptor().getMessageTypes().get(64);
    internal_static_hadoop_yarn_ReInitializeContainerResponseProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_ReInitializeContainerResponseProto_descriptor,
        new java.lang.String[] { });
    internal_static_hadoop_yarn_RestartContainerResponseProto_descriptor =
      getDescriptor().getMessageTypes().get(65);
    internal_static_hadoop_yarn_RestartContainerResponseProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_RestartContainerResponseProto_descriptor,
        new java.lang.String[] { });
    internal_static_hadoop_yarn_RollbackResponseProto_descriptor =
      getDescriptor().getMessageTypes().get(66);
    internal_static_hadoop_yarn_RollbackResponseProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_RollbackResponseProto_descriptor,
        new java.lang.String[] { });
    internal_static_hadoop_yarn_CommitResponseProto_descriptor =
      getDescriptor().getMessageTypes().get(67);
    internal_static_hadoop_yarn_CommitResponseProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_CommitResponseProto_descriptor,
        new java.lang.String[] { });
    internal_static_hadoop_yarn_StartContainersRequestProto_descriptor =
      getDescriptor().getMessageTypes().get(68);
    internal_static_hadoop_yarn_StartContainersRequestProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_StartContainersRequestProto_descriptor,
        new java.lang.String[] { "StartContainerRequest", });
    internal_static_hadoop_yarn_ContainerExceptionMapProto_descriptor =
      getDescriptor().getMessageTypes().get(69);
    internal_static_hadoop_yarn_ContainerExceptionMapProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_ContainerExceptionMapProto_descriptor,
        new java.lang.String[] { "ContainerId", "Exception", });
    internal_static_hadoop_yarn_StartContainersResponseProto_descriptor =
      getDescriptor().getMessageTypes().get(70);
    internal_static_hadoop_yarn_StartContainersResponseProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_StartContainersResponseProto_descriptor,
        new java.lang.String[] { "ServicesMetaData", "SucceededRequests", "FailedRequests", });
    internal_static_hadoop_yarn_StopContainersRequestProto_descriptor =
      getDescriptor().getMessageTypes().get(71);
    internal_static_hadoop_yarn_StopContainersRequestProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_StopContainersRequestProto_descriptor,
        new java.lang.String[] { "ContainerId", });
    internal_static_hadoop_yarn_StopContainersResponseProto_descriptor =
      getDescriptor().getMessageTypes().get(72);
    internal_static_hadoop_yarn_StopContainersResponseProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_StopContainersResponseProto_descriptor,
        new java.lang.String[] { "SucceededRequests", "FailedRequests", });
    internal_static_hadoop_yarn_GetContainerStatusesRequestProto_descriptor =
      getDescriptor().getMessageTypes().get(73);
    internal_static_hadoop_yarn_GetContainerStatusesRequestProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_GetContainerStatusesRequestProto_descriptor,
        new java.lang.String[] { "ContainerId", });
    internal_static_hadoop_yarn_GetContainerStatusesResponseProto_descriptor =
      getDescriptor().getMessageTypes().get(74);
    internal_static_hadoop_yarn_GetContainerStatusesResponseProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_GetContainerStatusesResponseProto_descriptor,
        new java.lang.String[] { "Status", "FailedRequests", });
    internal_static_hadoop_yarn_IncreaseContainersResourceRequestProto_descriptor =
      getDescriptor().getMessageTypes().get(75);
    internal_static_hadoop_yarn_IncreaseContainersResourceRequestProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_IncreaseContainersResourceRequestProto_descriptor,
        new java.lang.String[] { "IncreaseContainers", });
    internal_static_hadoop_yarn_IncreaseContainersResourceResponseProto_descriptor =
      getDescriptor().getMessageTypes().get(76);
    internal_static_hadoop_yarn_IncreaseContainersResourceResponseProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_IncreaseContainersResourceResponseProto_descriptor,
        new java.lang.String[] { "SucceededRequests", "FailedRequests", });
    internal_static_hadoop_yarn_ContainerUpdateRequestProto_descriptor =
      getDescriptor().getMessageTypes().get(77);
    internal_static_hadoop_yarn_ContainerUpdateRequestProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_ContainerUpdateRequestProto_descriptor,
        new java.lang.String[] { "UpdateContainerToken", });
    internal_static_hadoop_yarn_ContainerUpdateResponseProto_descriptor =
      getDescriptor().getMessageTypes().get(78);
    internal_static_hadoop_yarn_ContainerUpdateResponseProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_ContainerUpdateResponseProto_descriptor,
        new java.lang.String[] { "SucceededRequests", "FailedRequests", });
    internal_static_hadoop_yarn_GetApplicationAttemptReportRequestProto_descriptor =
      getDescriptor().getMessageTypes().get(79);
    internal_static_hadoop_yarn_GetApplicationAttemptReportRequestProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_GetApplicationAttemptReportRequestProto_descriptor,
        new java.lang.String[] { "ApplicationAttemptId", });
    internal_static_hadoop_yarn_GetApplicationAttemptReportResponseProto_descriptor =
      getDescriptor().getMessageTypes().get(80);
    internal_static_hadoop_yarn_GetApplicationAttemptReportResponseProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_GetApplicationAttemptReportResponseProto_descriptor,
        new java.lang.String[] { "ApplicationAttemptReport", });
    internal_static_hadoop_yarn_GetApplicationAttemptsRequestProto_descriptor =
      getDescriptor().getMessageTypes().get(81);
    internal_static_hadoop_yarn_GetApplicationAttemptsRequestProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_GetApplicationAttemptsRequestProto_descriptor,
        new java.lang.String[] { "ApplicationId", });
    internal_static_hadoop_yarn_GetApplicationAttemptsResponseProto_descriptor =
      getDescriptor().getMessageTypes().get(82);
    internal_static_hadoop_yarn_GetApplicationAttemptsResponseProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_GetApplicationAttemptsResponseProto_descriptor,
        new java.lang.String[] { "ApplicationAttempts", });
    internal_static_hadoop_yarn_GetContainerReportRequestProto_descriptor =
      getDescriptor().getMessageTypes().get(83);
    internal_static_hadoop_yarn_GetContainerReportRequestProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_GetContainerReportRequestProto_descriptor,
        new java.lang.String[] { "ContainerId", });
    internal_static_hadoop_yarn_GetContainerReportResponseProto_descriptor =
      getDescriptor().getMessageTypes().get(84);
    internal_static_hadoop_yarn_GetContainerReportResponseProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_GetContainerReportResponseProto_descriptor,
        new java.lang.String[] { "ContainerReport", });
    internal_static_hadoop_yarn_GetContainersRequestProto_descriptor =
      getDescriptor().getMessageTypes().get(85);
    internal_static_hadoop_yarn_GetContainersRequestProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_GetContainersRequestProto_descriptor,
        new java.lang.String[] { "ApplicationAttemptId", });
    internal_static_hadoop_yarn_GetContainersResponseProto_descriptor =
      getDescriptor().getMessageTypes().get(86);
    internal_static_hadoop_yarn_GetContainersResponseProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_GetContainersResponseProto_descriptor,
        new java.lang.String[] { "Containers", });
    internal_static_hadoop_yarn_UseSharedCacheResourceRequestProto_descriptor =
      getDescriptor().getMessageTypes().get(87);
    internal_static_hadoop_yarn_UseSharedCacheResourceRequestProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_UseSharedCacheResourceRequestProto_descriptor,
        new java.lang.String[] { "ApplicationId", "ResourceKey", });
    internal_static_hadoop_yarn_UseSharedCacheResourceResponseProto_descriptor =
      getDescriptor().getMessageTypes().get(88);
    internal_static_hadoop_yarn_UseSharedCacheResourceResponseProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_UseSharedCacheResourceResponseProto_descriptor,
        new java.lang.String[] { "Path", });
    internal_static_hadoop_yarn_ReleaseSharedCacheResourceRequestProto_descriptor =
      getDescriptor().getMessageTypes().get(89);
    internal_static_hadoop_yarn_ReleaseSharedCacheResourceRequestProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_ReleaseSharedCacheResourceRequestProto_descriptor,
        new java.lang.String[] { "ApplicationId", "ResourceKey", });
    internal_static_hadoop_yarn_ReleaseSharedCacheResourceResponseProto_descriptor =
      getDescriptor().getMessageTypes().get(90);
    internal_static_hadoop_yarn_ReleaseSharedCacheResourceResponseProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_ReleaseSharedCacheResourceResponseProto_descriptor,
        new java.lang.String[] { });
    internal_static_hadoop_yarn_GetNewReservationRequestProto_descriptor =
      getDescriptor().getMessageTypes().get(91);
    internal_static_hadoop_yarn_GetNewReservationRequestProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_GetNewReservationRequestProto_descriptor,
        new java.lang.String[] { });
    internal_static_hadoop_yarn_GetNewReservationResponseProto_descriptor =
      getDescriptor().getMessageTypes().get(92);
    internal_static_hadoop_yarn_GetNewReservationResponseProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_GetNewReservationResponseProto_descriptor,
        new java.lang.String[] { "ReservationId", });
    internal_static_hadoop_yarn_ReservationSubmissionRequestProto_descriptor =
      getDescriptor().getMessageTypes().get(93);
    internal_static_hadoop_yarn_ReservationSubmissionRequestProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_ReservationSubmissionRequestProto_descriptor,
        new java.lang.String[] { "Queue", "ReservationDefinition", "ReservationId", });
    internal_static_hadoop_yarn_ReservationSubmissionResponseProto_descriptor =
      getDescriptor().getMessageTypes().get(94);
    internal_static_hadoop_yarn_ReservationSubmissionResponseProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_ReservationSubmissionResponseProto_descriptor,
        new java.lang.String[] { });
    internal_static_hadoop_yarn_ReservationUpdateRequestProto_descriptor =
      getDescriptor().getMessageTypes().get(95);
    internal_static_hadoop_yarn_ReservationUpdateRequestProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_ReservationUpdateRequestProto_descriptor,
        new java.lang.String[] { "ReservationDefinition", "ReservationId", });
    internal_static_hadoop_yarn_ReservationUpdateResponseProto_descriptor =
      getDescriptor().getMessageTypes().get(96);
    internal_static_hadoop_yarn_ReservationUpdateResponseProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_ReservationUpdateResponseProto_descriptor,
        new java.lang.String[] { });
    internal_static_hadoop_yarn_ReservationDeleteRequestProto_descriptor =
      getDescriptor().getMessageTypes().get(97);
    internal_static_hadoop_yarn_ReservationDeleteRequestProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_ReservationDeleteRequestProto_descriptor,
        new java.lang.String[] { "ReservationId", });
    internal_static_hadoop_yarn_ReservationDeleteResponseProto_descriptor =
      getDescriptor().getMessageTypes().get(98);
    internal_static_hadoop_yarn_ReservationDeleteResponseProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_ReservationDeleteResponseProto_descriptor,
        new java.lang.String[] { });
    internal_static_hadoop_yarn_ReservationListRequestProto_descriptor =
      getDescriptor().getMessageTypes().get(99);
    internal_static_hadoop_yarn_ReservationListRequestProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_ReservationListRequestProto_descriptor,
        new java.lang.String[] { "Queue", "ReservationId", "StartTime", "EndTime", "IncludeResourceAllocations", });
    internal_static_hadoop_yarn_ReservationListResponseProto_descriptor =
      getDescriptor().getMessageTypes().get(100);
    internal_static_hadoop_yarn_ReservationListResponseProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_ReservationListResponseProto_descriptor,
        new java.lang.String[] { "Reservations", });
    internal_static_hadoop_yarn_RunSharedCacheCleanerTaskRequestProto_descriptor =
      getDescriptor().getMessageTypes().get(101);
    internal_static_hadoop_yarn_RunSharedCacheCleanerTaskRequestProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_RunSharedCacheCleanerTaskRequestProto_descriptor,
        new java.lang.String[] { });
    internal_static_hadoop_yarn_RunSharedCacheCleanerTaskResponseProto_descriptor =
      getDescriptor().getMessageTypes().get(102);
    internal_static_hadoop_yarn_RunSharedCacheCleanerTaskResponseProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_RunSharedCacheCleanerTaskResponseProto_descriptor,
        new java.lang.String[] { "Accepted", });
    internal_static_hadoop_yarn_GetLocalizationStatusesRequestProto_descriptor =
      getDescriptor().getMessageTypes().get(103);
    internal_static_hadoop_yarn_GetLocalizationStatusesRequestProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_GetLocalizationStatusesRequestProto_descriptor,
        new java.lang.String[] { "ContainerId", });
    internal_static_hadoop_yarn_GetLocalizationStatusesResponseProto_descriptor =
      getDescriptor().getMessageTypes().get(104);
    internal_static_hadoop_yarn_GetLocalizationStatusesResponseProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_GetLocalizationStatusesResponseProto_descriptor,
        new java.lang.String[] { "CntnLocalizationStatuses", "FailedRequests", });
    internal_static_hadoop_yarn_LocalizationStatusProto_descriptor =
      getDescriptor().getMessageTypes().get(105);
    internal_static_hadoop_yarn_LocalizationStatusProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_LocalizationStatusProto_descriptor,
        new java.lang.String[] { "ResourceKey", "LocalizationState", "Diagnostics", });
    internal_static_hadoop_yarn_ContainerLocalizationStatusesProto_descriptor =
      getDescriptor().getMessageTypes().get(106);
    internal_static_hadoop_yarn_ContainerLocalizationStatusesProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_ContainerLocalizationStatusesProto_descriptor,
        new java.lang.String[] { "ContainerId", "LocalizationStatuses", });
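    // Force static initialization of the imported proto files, so that the
    // descriptors this file depends on (the common security protos and
    // yarn_protos.proto) are built and cross-linked before first use.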
    org.apache.hadoop.security.proto.SecurityProtos.getDescriptor();
    org.apache.hadoop.yarn.proto.YarnProtos.getDescriptor();
  }

  // @@protoc_insertion_point(outer_class_scope)
}
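
// ---------------------------------------------------------------------------
// Editor's sketch (hypothetical; NOT part of the protoc output above): a
// minimal, runnable illustration of how the descriptor wiring in the static
// initializer is consumed. The class name and helper are assumptions added
// for illustration; index 11 is taken from the wiring above, where it maps
// to GetNewApplicationRequestProto.
// ---------------------------------------------------------------------------
final class YarnServiceProtosDescriptorDemo {

  private YarnServiceProtosDescriptorDemo() {}

  /**
   * Looks up a message descriptor by its declaration index, exactly as the
   * static initializer above does when populating the internal_static_*
   * fields.
   */
  static org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
      newApplicationRequestDescriptor() {
    return YarnServiceProtos.getDescriptor().getMessageTypes().get(11);
  }

  public static void main(String[] args) {
    // Expected to print "hadoop.yarn.GetNewApplicationRequestProto",
    // assuming index 11 still maps to that message as wired above.
    System.out.println(newApplicationRequestDescriptor().getFullName());
  }
}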